1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/renderer/media/rtc_video_decoder.h" | |
6 | |
7 #include "base/bind.h" | |
8 #include "base/logging.h" | |
9 #include "base/memory/ref_counted.h" | |
10 #include "base/message_loop_proxy.h" | |
11 #include "base/safe_numerics.h" | |
12 #include "base/stl_util.h" | |
13 #include "base/task_runner_util.h" | |
14 #include "content/child/child_thread.h" | |
15 #include "content/renderer/media/native_handle_impl.h" | |
16 #include "media/base/bind_to_loop.h" | |
17 #include "third_party/webrtc/common_video/interface/texture_video_frame.h" | |
18 #include "third_party/webrtc/system_wrappers/interface/ref_count.h" | |
19 | |
20 namespace content { | |
21 | |
22 static const int32 ID_LAST = 0x3FFFFFFF; // maximum bitstream buffer id | |
23 static const int32 ID_HALF = 0x20000000; | |
24 | |
25 // Maximum number of concurrent VDA::Decode() operations RVD will maintain. | |
26 // Higher values allow better pipelining in the GPU, but also require more | |
27 // resources. | |
28 static const size_t kMaxInFlightDecodes = 8; | |
29 | |
30 // Size of shared-memory segments we allocate. Since we reuse them we let them | |
31 // be on the beefy side. | |
32 static const size_t kSharedMemorySegmentBytes = 100 << 10; | |
33 | |
34 // Maximum number of allocated shared-memory segments. | |
35 static const int kMaxNumSharedMemorySegments = 16; | |
36 | |
37 // Maximum number of pending WebRTC buffers that are waiting for shared | |
38 // memory. 300 buffers is 10 seconds' worth at 30 fps. | |
39 static const size_t kMaxNumOfPendingBuffers = 300; | |
40 | |
41 // A shared memory segment and its allocated size. This class owns | |
42 // |shm|. | |
43 class RTCVideoDecoder::SHMBuffer { | |
44 public: | |
45 SHMBuffer(base::SharedMemory* shm, size_t size); | |
46 ~SHMBuffer(); | |
47 base::SharedMemory* const shm; | |
48 const size_t size; | |
49 }; | |
50 | |
51 RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size) | |
52 : shm(shm), size(size) {} | |
53 | |
54 RTCVideoDecoder::SHMBuffer::~SHMBuffer() { shm->Close(); } | |
55 | |
56 // Metadata of a bitstream buffer. | |
57 struct RTCVideoDecoder::BufferData { | |
58 BufferData(int32 bitstream_buffer_id, | |
59 uint32_t timestamp, | |
60 int width, | |
61 int height, | |
62 size_t size); | |
63 ~BufferData(); | |
64 int32 bitstream_buffer_id; | |
65 uint32_t timestamp; // in 90KHz | |
66 uint32_t width; | |
67 uint32_t height; | |
68 size_t size; // buffer size | |
69 }; | |
70 | |
71 RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id, | |
72 uint32_t timestamp, | |
73 int width, | |
74 int height, | |
75 size_t size) | |
76 : bitstream_buffer_id(bitstream_buffer_id), | |
77 timestamp(timestamp), | |
78 width(width), | |
79 height(height), | |
80 size(size) {} | |
81 | |
82 RTCVideoDecoder::BufferData::~BufferData() {} | |
83 | |
84 RTCVideoDecoder::RTCVideoDecoder( | |
85 const scoped_refptr<media::GpuVideoDecoder::Factories>& factories) | |
86 : weak_factory_(this), | |
87 factories_(factories), | |
88 vda_loop_proxy_(factories_->GetMessageLoop()), | |
89 decoder_texture_target_(0), | |
90 next_picture_buffer_id_(0), | |
91 state_(UNINITIALIZED), | |
92 decode_complete_callback_(NULL), | |
93 num_shm_buffers_(0), | |
94 next_bitstream_buffer_id_(0), | |
95 reset_bitstream_buffer_id_(0) { | |
96 // Initialize directly if |vda_loop_proxy_| is the renderer thread. | |
97 base::WaitableEvent compositor_loop_async_waiter(false, false); | |
98 if (vda_loop_proxy_->BelongsToCurrentThread()) { | |
99 Initialize(&compositor_loop_async_waiter); | |
100 return; | |
101 } | |
102 // Post the task if |vda_loop_proxy_| is the compositor thread. Waiting here | |
103 // is safe because the compositor thread will not be stopped until the | |
104 // renderer thread shuts down. | |
105 vda_loop_proxy_->PostTask(FROM_HERE, | |
106 base::Bind(&RTCVideoDecoder::Initialize, | |
107 base::Unretained(this), | |
108 &compositor_loop_async_waiter)); | |
109 compositor_loop_async_waiter.Wait(); | |
110 } | |
111 | |
112 RTCVideoDecoder::~RTCVideoDecoder() { | |
113 DVLOG(2) << "~RTCVideoDecoder"; | |
114 // Destroy the VDA and remove |this| as destruction observer if the VDA thread is alive. | |
115 if (vda_loop_proxy_->BelongsToCurrentThread()) { | |
116 base::MessageLoop::current()->RemoveDestructionObserver(this); | |
117 DestroyVDA(); | |
118 } else { | |
119 // VDA should have been destroyed in WillDestroyCurrentMessageLoop. | |
120 DCHECK(!vda_); | |
121 } | |
122 | |
123 // Delete all shared memory segments. | |
124 STLDeleteElements(&available_shm_segments_); | |
125 STLDeleteValues(&bitstream_buffers_in_decoder_); | |
126 STLDeleteContainerPairFirstPointers(buffers_to_be_decoded_.begin(), | |
127 buffers_to_be_decoded_.end()); | |
128 buffers_to_be_decoded_.clear(); | |
129 | |
130 // Delete WebRTC input buffers. | |
131 for (std::deque<std::pair<webrtc::EncodedImage, BufferData> >::iterator it = | |
132 webrtc_buffers_.begin(); | |
133 it != webrtc_buffers_.end(); | |
134 ++it) { | |
135 delete it->first._buffer; | |
136 } | |
137 } | |
138 | |
139 scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create( | |
140 const scoped_refptr<media::GpuVideoDecoder::Factories>& factories) { | |
141 scoped_ptr<RTCVideoDecoder> decoder(new RTCVideoDecoder(factories)); | |
142 decoder->vda_.reset(factories->CreateVideoDecodeAccelerator( | |
143 media::VP8PROFILE_MAIN, decoder.get())); | |
144 // vda can be NULL if VP8 is not supported. | |
145 if (decoder->vda_ != NULL) { | |
146 decoder->state_ = INITIALIZED; | |
147 } else { | |
148 factories->GetMessageLoop()->DeleteSoon(FROM_HERE, decoder.release()); | |
149 } | |
150 return decoder.Pass(); | |
151 } | |
152 | |
153 int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings, | |
154 int32_t /*numberOfCores*/) { | |
155 DVLOG(2) << "InitDecode"; | |
156 DCHECK_EQ(codecSettings->codecType, webrtc::kVideoCodecVP8); | |
157 if (codecSettings->codecSpecific.VP8.feedbackModeOn) { | |
158 LOG(ERROR) << "Feedback mode not supported"; | |
159 return WEBRTC_VIDEO_CODEC_ERROR; | |
160 } | |
161 | |
162 base::AutoLock auto_lock(lock_); | |
163 if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) { | |
164 LOG(ERROR) << "VDA is not initialized. state=" << state_; | |
165 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
166 } | |
167 // Create a shared memory segment if the queue is empty. | |
168 if (available_shm_segments_.size() == 0) { | |
169 content::ChildThread::current()->message_loop() | |
170 ->PostTask(FROM_HERE, | |
171 base::Bind(&RTCVideoDecoder::CreateSHM, | |
172 weak_this_, | |
173 kSharedMemorySegmentBytes)); | |
174 } | |
175 return WEBRTC_VIDEO_CODEC_OK; | |
176 } | |
177 | |
178 int32_t RTCVideoDecoder::Decode( | |
179 const webrtc::EncodedImage& inputImage, | |
180 bool missingFrames, | |
181 const webrtc::RTPFragmentationHeader* /*fragmentation*/, | |
182 const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/, | |
183 int64_t /*renderTimeMs*/) { | |
184 DVLOG(3) << "Decode"; | |
185 | |
186 int bitstream_buffer_id; | |
187 { | |
188 base::AutoLock auto_lock(lock_); | |
189 if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) { | |
190 LOG(ERROR) << "The decoder has not initialized."; | |
191 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
192 } | |
193 if (state_ == DECODE_ERROR) { | |
194 LOG(ERROR) << "Decoding error occurred."; | |
195 return WEBRTC_VIDEO_CODEC_ERROR; | |
196 } | |
197 bitstream_buffer_id = next_bitstream_buffer_id_; | |
198 // Mask against 30 bits, to avoid (undefined) wraparound on signed integer. | |
199 next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST; | |
200 } | |
201 if (missingFrames || !inputImage._completeFrame) { | |
202 DLOG(ERROR) << "Missing or incomplete frames."; | |
203 // Unlike the SW decoder in libvpx, the HW decoder cannot handle broken frames. | |
204 // Return an error to request a key frame. | |
205 return WEBRTC_VIDEO_CODEC_ERROR; | |
206 } | |
207 if (inputImage._frameType == webrtc::kKeyFrame) | |
208 frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight); | |
209 | |
210 // Create buffer metadata. | |
211 BufferData buffer_data(bitstream_buffer_id, | |
212 inputImage._timeStamp, | |
213 frame_size_.width(), | |
214 frame_size_.height(), | |
215 inputImage._length); | |
216 | |
217 // The buffers should be sent to VDA in order. Send the pending buffers first. | |
218 SendPendingBuffersForDecode(); | |
219 | |
220 // If the shared memory is available and there are no pending buffers, send | |
221 // the buffer for decode. If not, save the buffer in the queue for decode | |
222 // later. | |
223 SHMBuffer* shm_buffer = NULL; | |
224 if (webrtc_buffers_.size() == 0) | |
225 shm_buffer = GetSHM(inputImage._length); | |
226 | |
227 if (shm_buffer != NULL) { | |
228 SendBufferForDecode(inputImage, shm_buffer, buffer_data); | |
229 return WEBRTC_VIDEO_CODEC_OK; | |
230 } | |
231 | |
232 return SaveToPendingBuffers(inputImage, buffer_data); | |
233 } | |
234 | |
235 int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback( | |
236 webrtc::DecodedImageCallback* callback) { | |
237 DVLOG(2) << "RegisterDecodeCompleteCallback"; | |
238 base::AutoLock auto_lock(lock_); | |
239 decode_complete_callback_ = callback; | |
240 return WEBRTC_VIDEO_CODEC_OK; | |
241 } | |
242 | |
243 int32_t RTCVideoDecoder::Release() { | |
244 DVLOG(2) << "Release"; | |
245 // Do not destroy VDA because the decoder will be recycled by | |
246 // RTCVideoDecoderFactory. Just reset VDA. | |
247 return Reset(); | |
248 } | |
249 | |
250 int32_t RTCVideoDecoder::Reset() { | |
251 DVLOG(2) << "Reset"; | |
252 base::AutoLock auto_lock(lock_); | |
253 if (state_ == UNINITIALIZED) { | |
254 LOG(ERROR) << "Decoder not initialized."; | |
255 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
256 } | |
257 reset_bitstream_buffer_id_ = next_bitstream_buffer_id_; | |
258 // If VDA is already resetting, no need to request the reset again. | |
259 if (state_ != RESETTING) { | |
260 state_ = RESETTING; | |
261 vda_loop_proxy_->PostTask( | |
262 FROM_HERE, base::Bind(&RTCVideoDecoder::ResetInternal, weak_this_)); | |
263 } | |
264 return WEBRTC_VIDEO_CODEC_OK; | |
265 } | |
266 | |
267 void RTCVideoDecoder::NotifyInitializeDone() { | |
268 DVLOG(2) << "NotifyInitializeDone"; | |
269 NOTREACHED(); | |
270 } | |
271 | |
272 void RTCVideoDecoder::ProvidePictureBuffers(uint32 count, | |
273 const gfx::Size& size, | |
274 uint32 texture_target) { | |
275 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
276 DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target; | |
277 | |
278 if (!vda_) | |
279 return; | |
280 | |
281 std::vector<uint32> texture_ids; | |
282 decoder_texture_target_ = texture_target; | |
283 if (!factories_->CreateTextures( | |
284 count, size, &texture_ids, decoder_texture_target_)) { | |
285 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); | |
286 return; | |
287 } | |
288 DCHECK_EQ(count, texture_ids.size()); | |
289 | |
290 std::vector<media::PictureBuffer> picture_buffers; | |
291 for (size_t i = 0; i < texture_ids.size(); ++i) { | |
292 picture_buffers.push_back( | |
293 media::PictureBuffer(next_picture_buffer_id_++, size, texture_ids[i])); | |
294 bool inserted = assigned_picture_buffers_.insert(std::make_pair( | |
295 picture_buffers.back().id(), picture_buffers.back())).second; | |
296 DCHECK(inserted); | |
297 } | |
298 vda_->AssignPictureBuffers(picture_buffers); | |
299 } | |
300 | |
301 void RTCVideoDecoder::DismissPictureBuffer(int32 id) { | |
302 DVLOG(3) << "DismissPictureBuffer. id=" << id; | |
303 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
304 | |
305 std::map<int32, media::PictureBuffer>::iterator it = | |
306 assigned_picture_buffers_.find(id); | |
307 if (it == assigned_picture_buffers_.end()) { | |
308 NOTREACHED() << "Missing picture buffer: " << id; | |
309 return; | |
310 } | |
311 | |
312 media::PictureBuffer buffer_to_dismiss = it->second; | |
313 assigned_picture_buffers_.erase(it); | |
314 | |
315 std::set<int32>::iterator at_display_it = | |
316 picture_buffers_at_display_.find(id); | |
317 | |
318 if (at_display_it == picture_buffers_at_display_.end()) { | |
319 // We can delete the texture immediately as it's not being displayed. | |
320 factories_->DeleteTexture(buffer_to_dismiss.texture_id()); | |
321 } else { | |
322 // Texture in display. Postpone deletion until after it's returned to us. | |
323 bool inserted = dismissed_picture_buffers_ | |
324 .insert(std::make_pair(id, buffer_to_dismiss)).second; | |
325 DCHECK(inserted); | |
326 } | |
327 } | |
328 | |
329 void RTCVideoDecoder::PictureReady(const media::Picture& picture) { | |
330 DVLOG(3) << "PictureReady"; | |
331 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
332 | |
333 std::map<int32, media::PictureBuffer>::iterator it = | |
334 assigned_picture_buffers_.find(picture.picture_buffer_id()); | |
335 if (it == assigned_picture_buffers_.end()) { | |
336 NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id(); | |
337 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); | |
338 return; | |
339 } | |
340 const media::PictureBuffer& pb = it->second; | |
341 | |
342 // Create a media::VideoFrame. | |
343 uint32_t timestamp = 0, width = 0, height = 0; | |
344 size_t size = 0; | |
345 GetBufferData( | |
346 picture.bitstream_buffer_id(), &timestamp, &width, &height, &size); | |
347 scoped_refptr<media::VideoFrame> frame = | |
348 CreateVideoFrame(picture, pb, timestamp, width, height, size); | |
349 bool inserted = | |
350 picture_buffers_at_display_.insert(picture.picture_buffer_id()).second; | |
351 DCHECK(inserted); | |
352 | |
353 // Create a WebRTC video frame. | |
354 webrtc::RefCountImpl<NativeHandleImpl>* handle = | |
355 new webrtc::RefCountImpl<NativeHandleImpl>(); | |
356 handle->SetHandle(frame.get()); | |
357 webrtc::TextureVideoFrame decoded_image(width, height, timestamp, 0, handle); | |
wuchengli 2013/06/28 15:08:45: malloc has been removed. I'll update the webrtc CL soon.
358 | |
359 // Invoke decode callback. WebRTC expects no frame callback after Release. | |
360 { | |
361 base::AutoLock auto_lock(lock_); | |
362 DCHECK(decode_complete_callback_ != NULL); | |
363 if (IsBufferAfterReset(picture.bitstream_buffer_id(), | |
364 reset_bitstream_buffer_id_)) { | |
365 decode_complete_callback_->Decoded(decoded_image); | |
366 } | |
367 } | |
368 } | |
369 | |
370 scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame( | |
371 const media::Picture& picture, | |
372 const media::PictureBuffer& pb, | |
373 uint32_t timestamp, | |
374 uint32_t width, | |
375 uint32_t height, | |
376 size_t size) { | |
377 gfx::Rect visible_rect(width, height); | |
378 gfx::Size natural_size(width, height); | |
379 DCHECK(decoder_texture_target_); | |
380 // Convert timestamp from 90KHz to ms. | |
381 base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue( | |
382 base::checked_numeric_cast<uint64_t>(timestamp) * 1000 / 90); | |
383 return media::VideoFrame::WrapNativeTexture( | |
384 pb.texture_id(), | |
385 decoder_texture_target_, | |
386 pb.size(), | |
387 visible_rect, | |
388 natural_size, | |
389 timestamp_ms, | |
390 base::Bind(&media::GpuVideoDecoder::Factories::ReadPixels, | |
391 factories_, | |
392 pb.texture_id(), | |
393 decoder_texture_target_, | |
394 natural_size), | |
395 media::BindToCurrentLoop(base::Bind(&RTCVideoDecoder::ReusePictureBuffer, | |
396 weak_this_, | |
397 picture.picture_buffer_id()))); | |
398 } | |
399 | |
400 void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) { | |
401 DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id; | |
402 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
403 | |
404 std::map<int32, SHMBuffer*>::iterator it = | |
405 bitstream_buffers_in_decoder_.find(id); | |
406 if (it == bitstream_buffers_in_decoder_.end()) { | |
407 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE); | |
408 NOTREACHED() << "Missing bitstream buffer: " << id; | |
409 return; | |
410 } | |
411 | |
412 PutSHM(it->second); | |
413 bitstream_buffers_in_decoder_.erase(it); | |
414 | |
415 RequestBufferDecode(); | |
416 } | |
417 | |
418 void RTCVideoDecoder::NotifyFlushDone() { | |
419 DVLOG(3) << "NotifyFlushDone"; | |
420 NOTREACHED() << "Unexpected flush done notification."; | |
421 } | |
422 | |
423 void RTCVideoDecoder::NotifyResetDone() { | |
424 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
425 DVLOG(3) << "NotifyResetDone"; | |
426 | |
427 if (!vda_) | |
428 return; | |
429 | |
430 input_buffer_data_.clear(); | |
431 { | |
432 base::AutoLock auto_lock(lock_); | |
433 state_ = INITIALIZED; | |
434 } | |
435 // Send the pending buffers for decoding. | |
436 RequestBufferDecode(); | |
437 } | |
438 | |
439 void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) { | |
440 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
441 if (!vda_) | |
442 return; | |
443 | |
444 DLOG(ERROR) << "VDA Error:" << error; | |
445 DestroyVDA(); | |
446 | |
447 base::AutoLock auto_lock(lock_); | |
448 state_ = DECODE_ERROR; | |
449 } | |
450 | |
451 void RTCVideoDecoder::WillDestroyCurrentMessageLoop() { | |
452 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
453 factories_->Abort(); | |
454 weak_factory_.InvalidateWeakPtrs(); | |
455 DestroyVDA(); | |
456 } | |
457 | |
458 void RTCVideoDecoder::Initialize(base::WaitableEvent* waiter) { | |
459 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
460 base::MessageLoop::current()->AddDestructionObserver(this); | |
461 weak_this_ = weak_factory_.GetWeakPtr(); | |
462 waiter->Signal(); | |
463 } | |
464 | |
465 void RTCVideoDecoder::RequestBufferDecode() { | |
466 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
467 if (!vda_) | |
468 return; | |
469 | |
470 while (CanMoreDecodeWorkBeDone()) { | |
471 // Get a buffer and data from the queue. | |
472 std::pair<SHMBuffer*, BufferData>* buffer_pair; | |
473 SHMBuffer* shm_buffer = NULL; | |
474 BufferData* buffer_data = NULL; | |
475 { | |
476 base::AutoLock auto_lock(lock_); | |
477 // Do not request decode if VDA is resetting. | |
478 if (buffers_to_be_decoded_.size() == 0 || state_ == RESETTING) | |
479 return; | |
480 buffer_pair = &buffers_to_be_decoded_.front(); | |
481 buffers_to_be_decoded_.pop_front(); | |
482 shm_buffer = buffer_pair->first; | |
483 buffer_data = &buffer_pair->second; | |
484 // Drop the buffers before Reset or Release is called. | |
485 if (!IsBufferAfterReset(buffer_data->bitstream_buffer_id, | |
486 reset_bitstream_buffer_id_)) { | |
487 available_shm_segments_.push_back(shm_buffer); | |
488 continue; | |
489 } | |
490 } | |
491 | |
492 // Create a BitstreamBuffer and send to VDA to decode. | |
493 media::BitstreamBuffer bitstream_buffer(buffer_data->bitstream_buffer_id, | |
494 shm_buffer->shm->handle(), | |
495 buffer_data->size); | |
496 bool inserted = bitstream_buffers_in_decoder_ | |
497 .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second; | |
498 DCHECK(inserted); | |
499 RecordBufferData(*buffer_data); | |
500 vda_->Decode(bitstream_buffer); | |
501 } | |
502 } | |
503 | |
504 bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() { | |
505 return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes; | |
506 } | |
507 | |
508 bool RTCVideoDecoder::IsBufferAfterReset(int32 id_buffer, int32 id_reset) { | |
Ami GONE FROM CHROMIUM 2013/06/28 17:04:00: If Reset/Release have never been called, id_reset …
wuchengli 2013/06/29 05:01:30: Right. Suppose we add sentinel value -1. When id_reset …
Ami GONE FROM CHROMIUM 2013/06/29 05:23:03: My point was that the question of whether |id_buffer| …
wuchengli 2013/07/02 10:34:25: Done. Added ID_INVALID=-1 as sentinel value.
509 int32 diff = id_buffer - id_reset; | |
510 if (diff < 0) | |
511 diff += ID_LAST + 1; | |
512 return diff < ID_HALF; | |
Ami GONE FROM CHROMIUM 2013/06/28 17:04:00: worth a test for the interesting edge conditions?
wuchengli 2013/07/02 10:34:25: Done.
513 } | |
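
The thread above settles on adding an ID_INVALID sentinel so the comparison is well defined before the first Reset/Release, and asks for tests of the edge conditions. A minimal sketch of what that could look like, reusing ID_LAST and ID_HALF from the top of this file; the helper name, the sentinel value, and the listed test cases are illustrative assumptions, not the actual follow-up patch:

// Sketch only: hypothetical sentinel meaning "Reset/Release never called".
static const int32 ID_INVALID = -1;

static bool IsBufferAfterResetWithSentinel(int32 id_buffer, int32 id_reset) {
  // Before the first Reset/Release every buffer should be decoded.
  if (id_reset == ID_INVALID)
    return true;
  // Compare modulo the 30-bit id space: |id_buffer| counts as "after" the
  // reset point if it is less than half the id space ahead of it.
  int32 diff = id_buffer - id_reset;
  if (diff < 0)
    diff += ID_LAST + 1;
  return diff < ID_HALF;
}

// Edge conditions a unit test could cover (expected results shown):
//   IsBufferAfterResetWithSentinel(0, ID_INVALID)  -> true   (never reset)
//   IsBufferAfterResetWithSentinel(5, 5)           -> true   (diff == 0)
//   IsBufferAfterResetWithSentinel(4, 5)           -> false  (just before reset)
//   IsBufferAfterResetWithSentinel(0, ID_LAST)     -> true   (wraparound)
//   IsBufferAfterResetWithSentinel(ID_HALF, 0)     -> false  (half the id space away)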
514 | |
515 void RTCVideoDecoder::SendBufferForDecode( | |
516 const webrtc::EncodedImage& input_image, | |
517 SHMBuffer* shm_buffer, | |
518 const BufferData& buffer_data) { | |
519 memcpy(shm_buffer->shm->memory(), input_image._buffer, input_image._length); | |
520 std::pair<SHMBuffer*, BufferData> buffer_pair = | |
521 std::make_pair(shm_buffer, buffer_data); | |
522 | |
523 // Store the buffer and the metadata to the queue. | |
524 { | |
525 base::AutoLock auto_lock(lock_); | |
526 buffers_to_be_decoded_.push_back(buffer_pair); | |
527 } | |
528 vda_loop_proxy_->PostTask( | |
529 FROM_HERE, base::Bind(&RTCVideoDecoder::RequestBufferDecode, weak_this_)); | |
530 } | |
531 | |
532 void RTCVideoDecoder::SendPendingBuffersForDecode() { | |
533 DVLOG(2) << "SendPendingBuffersForDecode"; | |
534 while (webrtc_buffers_.size() > 0) { | |
535 // Get a WebRTC buffer from the queue. | |
536 const std::pair<webrtc::EncodedImage, BufferData>& buffer_pair = | |
537 webrtc_buffers_.front(); | |
538 const webrtc::EncodedImage& input_image = buffer_pair.first; | |
539 const BufferData& buffer_data = buffer_pair.second; | |
540 | |
541 // Drop the frame if it comes before Reset or Release. | |
542 { | |
543 base::AutoLock auto_lock(lock_); | |
544 if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id, | |
545 reset_bitstream_buffer_id_)) { | |
546 webrtc_buffers_.pop_front(); | |
547 delete input_image._buffer; | |
548 continue; | |
549 } | |
550 } | |
551 | |
552 // Get shared memory and send it for decode. | |
553 SHMBuffer* shm_buffer = GetSHM(input_image._length); | |
554 if (!shm_buffer) | |
555 return; | |
556 SendBufferForDecode(input_image, shm_buffer, buffer_data); | |
557 webrtc_buffers_.pop_front(); | |
558 delete input_image._buffer; | |
559 } | |
560 } | |
561 | |
562 int RTCVideoDecoder::SaveToPendingBuffers( | |
563 const webrtc::EncodedImage& input_image, | |
564 const BufferData& buffer_data) { | |
565 DVLOG(2) << "SaveToPendingBuffers"; | |
566 // Too many buffers are queued; something has gone wrong. | |
Ami GONE FROM CHROMIUM 2013/06/28 17:04:00: This is not a super-satisfying block of code... IW…
wuchengli 2013/06/29 05:01:30: I saw apprtc loopback showed the self image running …
Ami GONE FROM CHROMIUM 2013/06/29 05:23:03: I'd rather have the bug manifest so it can be trac…
wuchengli 2013/07/02 10:34:25: I added error log here. I'm not sure the issue still …
567 if (webrtc_buffers_.size() >= kMaxNumOfPendingBuffers) | |
568 return WEBRTC_VIDEO_CODEC_ERROR; | |
569 | |
570 // Clone the input image and save it to the queue. | |
571 uint8_t* buffer = static_cast<uint8_t*>(malloc(input_image._length)); | |
572 memcpy(buffer, input_image._buffer, input_image._length); | |
Ami GONE FROM CHROMIUM 2013/06/28 17:04:00: Add a TODO to extend the Decode() interface to take …
wuchengli 2013/07/02 10:34:25: Done.
573 webrtc::EncodedImage encoded_image( | |
574 buffer, input_image._length, input_image._length); | |
575 std::pair<webrtc::EncodedImage, BufferData> buffer_pair = | |
576 std::make_pair(encoded_image, buffer_data); | |
577 webrtc_buffers_.push_back(buffer_pair); | |
578 return WEBRTC_VIDEO_CODEC_OK; | |
579 } | |
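
The threads above ask for an error log when the pending queue overflows and for a TODO about extending Decode() to take ownership of the input buffer so the clone can go away. A rough sketch of how the start of this function could pick up both suggestions; the exact log text and TODO wording are assumptions, not the landed change:

// Sketch only: fragment of SaveToPendingBuffers() with the review suggestions.
if (webrtc_buffers_.size() >= kMaxNumOfPendingBuffers) {
  // Shared memory has not come back for ~10 seconds of frames at 30 fps.
  // Log loudly so the bug can be tracked, and return an error so WebRTC
  // requests a key frame.
  LOG(ERROR) << "Too many pending buffers: " << webrtc_buffers_.size();
  return WEBRTC_VIDEO_CODEC_ERROR;
}
// TODO: extend the webrtc::VideoDecoder::Decode() interface to take ownership
// of |input_image._buffer| so the malloc/memcpy clone below is unnecessary.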
580 | |
581 void RTCVideoDecoder::ResetInternal() { | |
582 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
583 DVLOG(2) << "ResetInternal"; | |
584 if (vda_) | |
585 vda_->Reset(); | |
586 } | |
587 | |
588 void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id) { | |
589 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
590 DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id; | |
591 | |
592 if (!vda_) | |
593 return; | |
594 | |
595 CHECK(!picture_buffers_at_display_.empty()); | |
596 | |
597 size_t num_erased = picture_buffers_at_display_.erase(picture_buffer_id); | |
598 DCHECK(num_erased); | |
599 | |
600 std::map<int32, media::PictureBuffer>::iterator it = | |
601 assigned_picture_buffers_.find(picture_buffer_id); | |
602 | |
603 if (it == assigned_picture_buffers_.end()) { | |
604 // This picture was dismissed while in display, so we postponed deletion. | |
605 it = dismissed_picture_buffers_.find(picture_buffer_id); | |
606 DCHECK(it != dismissed_picture_buffers_.end()); | |
607 factories_->DeleteTexture(it->second.texture_id()); | |
608 dismissed_picture_buffers_.erase(it); | |
609 return; | |
610 } | |
611 | |
612 vda_->ReusePictureBuffer(picture_buffer_id); | |
613 } | |
614 | |
615 void RTCVideoDecoder::DestroyTextures() { | |
616 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
617 std::map<int32, media::PictureBuffer>::iterator it; | |
618 | |
619 for (it = assigned_picture_buffers_.begin(); | |
620 it != assigned_picture_buffers_.end(); | |
621 ++it) { | |
622 factories_->DeleteTexture(it->second.texture_id()); | |
623 } | |
624 assigned_picture_buffers_.clear(); | |
625 | |
626 for (it = dismissed_picture_buffers_.begin(); | |
627 it != dismissed_picture_buffers_.end(); | |
628 ++it) { | |
629 factories_->DeleteTexture(it->second.texture_id()); | |
630 } | |
631 dismissed_picture_buffers_.clear(); | |
632 } | |
633 | |
634 void RTCVideoDecoder::DestroyVDA() { | |
635 DVLOG(2) << "DestroyVDA"; | |
636 DCHECK(vda_loop_proxy_->BelongsToCurrentThread()); | |
637 if (vda_) | |
638 vda_.release()->Destroy(); | |
639 DestroyTextures(); | |
640 } | |
641 | |
642 RTCVideoDecoder::SHMBuffer* RTCVideoDecoder::GetSHM(size_t min_size) { | |
643 // Reuse a SHM if possible. | |
644 SHMBuffer* ret = NULL; | |
645 base::AutoLock auto_lock(lock_); | |
646 if (!available_shm_segments_.empty() && | |
647 available_shm_segments_.back()->size >= min_size) { | |
648 ret = available_shm_segments_.back(); | |
649 available_shm_segments_.pop_back(); | |
650 } | |
651 // Post to the child thread to create shared memory if SHM cannot be reused | |
652 // or the queue is almost empty. | |
653 if (num_shm_buffers_ < kMaxNumSharedMemorySegments && | |
654 (ret == NULL || available_shm_segments_.size() <= 1)) { | |
655 content::ChildThread::current()->message_loop()->PostTask( | |
656 FROM_HERE, | |
657 base::Bind(&RTCVideoDecoder::CreateSHM, weak_this_, min_size)); | |
658 } | |
659 return ret; | |
660 } | |
661 | |
662 void RTCVideoDecoder::CreateSHM(size_t min_size) { | |
663 DCHECK(base::MessageLoop::current() == | |
664 content::ChildThread::current()->message_loop()); | |
665 DVLOG(2) << "CreateSharedMemory. size=" << min_size; | |
666 { | |
667 base::AutoLock auto_lock(lock_); | |
668 if (num_shm_buffers_ >= kMaxNumSharedMemorySegments) | |
669 return; | |
670 } | |
671 size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes); | |
672 base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate); | |
673 if (shm != NULL) { | |
674 { | |
675 base::AutoLock auto_lock(lock_); | |
676 num_shm_buffers_++; | |
677 } | |
678 PutSHM(new SHMBuffer(shm, size_to_allocate)); | |
679 } | |
680 } | |
681 | |
682 void RTCVideoDecoder::PutSHM(SHMBuffer* shm_buffer) { | |
683 base::AutoLock auto_lock(lock_); | |
684 available_shm_segments_.push_back(shm_buffer); | |
Ami GONE FROM CHROMIUM 2013/06/28 17:04:00: does it make sense to SendPendingBuffersForDecode …
wuchengli 2013/06/29 05:01:30: PutSHM can be called by the compositor thread or the …
Ami GONE FROM CHROMIUM 2013/06/29 05:23:03: My point is that if there are pending decodes that …
wuchengli 2013/07/02 10:34:25: I've changed the code. Now there are two buffer queues …
685 } | |
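
The thread above asks whether PutSHM should immediately drain the pending WebRTC queue once a segment is returned. A naive sketch of that idea follows; as the replies note, PutSHM can run off the decoding thread while |webrtc_buffers_| is only handled there, so this is not safe as written, and the reply says the code was instead reworked into two buffer queues:

// Sketch only: the reviewer's suggestion taken literally.
void RTCVideoDecoder::PutSHM(SHMBuffer* shm_buffer) {
  {
    base::AutoLock auto_lock(lock_);
    available_shm_segments_.push_back(shm_buffer);
  }
  // A segment just became available, so buffers waiting in |webrtc_buffers_|
  // may now be sent for decode. Unsafe unless this runs on the decoding
  // thread, which is exactly the objection raised in the thread above.
  SendPendingBuffersForDecode();
}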
686 | |
687 void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) { | |
688 input_buffer_data_.push_front(buffer_data); | |
689 // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but | |
690 // that's too small for some pathological B-frame test videos. The cost of | |
691 // using too-high a value is low (192 bits per extra slot). | |
692 static const size_t kMaxInputBufferDataSize = 128; | |
693 // Pop from the back of the list, because that's the oldest data and the | |
694 // least likely to be useful in the future. | |
695 if (input_buffer_data_.size() > kMaxInputBufferDataSize) | |
696 input_buffer_data_.pop_back(); | |
697 } | |
698 | |
699 void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id, | |
700 uint32_t* timestamp, | |
701 uint32_t* width, | |
702 uint32_t* height, | |
703 size_t* size) { | |
704 for (std::list<BufferData>::iterator it = input_buffer_data_.begin(); | |
705 it != input_buffer_data_.end(); | |
706 ++it) { | |
707 if (it->bitstream_buffer_id != bitstream_buffer_id) | |
708 continue; | |
709 *timestamp = it->timestamp; | |
710 *width = it->width; | |
711 *height = it->height; | |
712 return; | |
713 } | |
714 NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id; | |
715 } | |
716 | |
717 } // namespace content | |