Chromium Code Reviews

Side by Side Diff: content/renderer/media/rtc_video_decoder.cc

Issue 2095393002: Add posciak and wuchengli to WebRTC HW decoder and encoder files (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: remove duplicated owners (created 4 years, 5 months ago)
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "content/renderer/media/rtc_video_decoder.h"
6
7 #include <utility>
8
9 #include "base/bind.h"
10 #include "base/logging.h"
11 #include "base/memory/ref_counted.h"
12 #include "base/metrics/histogram.h"
13 #include "base/numerics/safe_conversions.h"
14 #include "base/stl_util.h"
15 #include "base/synchronization/waitable_event.h"
16 #include "base/task_runner_util.h"
17 #include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h"
18 #include "gpu/command_buffer/common/mailbox_holder.h"
19 #include "media/base/bind_to_current_loop.h"
20 #include "media/renderers/gpu_video_accelerator_factories.h"
21 #include "third_party/skia/include/core/SkBitmap.h"
22 #include "third_party/webrtc/base/bind.h"
23 #include "third_party/webrtc/base/refcount.h"
24 #include "third_party/webrtc/modules/video_coding/codecs/h264/include/h264.h"
25 #include "third_party/webrtc/video_frame.h"
26
27 #if defined(OS_WIN)
28 #include "base/command_line.h"
29 #include "base/win/windows_version.h"
30 #include "content/public/common/content_switches.h"
31 #endif // defined(OS_WIN)
32
33 namespace content {
34
35 const int32_t RTCVideoDecoder::ID_LAST = 0x3FFFFFFF;
36 const int32_t RTCVideoDecoder::ID_HALF = 0x20000000;
37 const int32_t RTCVideoDecoder::ID_INVALID = -1;
38 const uint32_t kNumVDAErrorsBeforeSWFallback = 50;
39
40 // Maximum number of concurrent VDA::Decode() operations RVD will maintain.
41 // Higher values allow better pipelining in the GPU, but also require more
42 // resources.
43 static const size_t kMaxInFlightDecodes = 8;
44
45 // Number of allocated shared memory segments.
46 static const size_t kNumSharedMemorySegments = 16;
47
48 // Maximum number of pending WebRTC buffers that are waiting for shared memory.
49 static const size_t kMaxNumOfPendingBuffers = 8;
50
51 RTCVideoDecoder::BufferData::BufferData(int32_t bitstream_buffer_id,
52 uint32_t timestamp,
53 size_t size,
54 const gfx::Rect& visible_rect)
55 : bitstream_buffer_id(bitstream_buffer_id),
56 timestamp(timestamp),
57 size(size),
58 visible_rect(visible_rect) {}
59
60 RTCVideoDecoder::BufferData::BufferData() {}
61
62 RTCVideoDecoder::BufferData::~BufferData() {}
63
64 RTCVideoDecoder::RTCVideoDecoder(webrtc::VideoCodecType type,
65 media::GpuVideoAcceleratorFactories* factories)
66 : vda_error_counter_(0),
67 video_codec_type_(type),
68 factories_(factories),
69 decoder_texture_target_(0),
70 pixel_format_(media::PIXEL_FORMAT_UNKNOWN),
71 next_picture_buffer_id_(0),
72 state_(UNINITIALIZED),
73 decode_complete_callback_(nullptr),
74 num_shm_buffers_(0),
75 next_bitstream_buffer_id_(0),
76 reset_bitstream_buffer_id_(ID_INVALID),
77 weak_factory_(this) {
78 DCHECK(!factories_->GetTaskRunner()->BelongsToCurrentThread());
79 }
80
81 RTCVideoDecoder::~RTCVideoDecoder() {
82 DVLOG(2) << "~RTCVideoDecoder";
83 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
84 DestroyVDA();
85
86 // Delete all shared memory segments.
87 STLDeleteElements(&available_shm_segments_);
88 STLDeleteValues(&bitstream_buffers_in_decoder_);
89 STLDeleteContainerPairFirstPointers(decode_buffers_.begin(),
90 decode_buffers_.end());
91 decode_buffers_.clear();
92 ClearPendingBuffers();
93 }
94
95 // static
96 std::unique_ptr<RTCVideoDecoder> RTCVideoDecoder::Create(
97 webrtc::VideoCodecType type,
98 media::GpuVideoAcceleratorFactories* factories) {
99 std::unique_ptr<RTCVideoDecoder> decoder;
100 // See https://bugs.chromium.org/p/webrtc/issues/detail?id=5717.
101 #if defined(OS_WIN)
102 if (!base::CommandLine::ForCurrentProcess()->HasSwitch(
103 switches::kEnableWin7WebRtcHWH264Decoding) &&
104 type == webrtc::kVideoCodecH264 &&
105 base::win::GetVersion() == base::win::VERSION_WIN7) {
106 DLOG(ERROR) << "H264 HW decoding on Win7 is not supported.";
107 return decoder;
108 }
109 #endif // defined(OS_WIN)
110 // Convert WebRTC codec type to media codec profile.
111 media::VideoCodecProfile profile;
112 switch (type) {
113 case webrtc::kVideoCodecVP8:
114 profile = media::VP8PROFILE_ANY;
115 break;
116 case webrtc::kVideoCodecH264:
117 profile = media::H264PROFILE_MAIN;
118 break;
119 default:
120 DVLOG(2) << "Video codec not supported:" << type;
121 return decoder;
122 }
123
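// Create the VDA synchronously: post CreateVDA() to the factories' task
// runner and block this thread on |waiter| until it signals.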
124 base::WaitableEvent waiter(base::WaitableEvent::ResetPolicy::MANUAL,
125 base::WaitableEvent::InitialState::NOT_SIGNALED);
126 decoder.reset(new RTCVideoDecoder(type, factories));
127 decoder->factories_->GetTaskRunner()->PostTask(
128 FROM_HERE,
129 base::Bind(&RTCVideoDecoder::CreateVDA,
130 base::Unretained(decoder.get()),
131 profile,
132 &waiter));
133 waiter.Wait();
134 // |decoder->vda_| is nullptr if the codec is not supported.
135 if (decoder->vda_)
136 decoder->state_ = INITIALIZED;
137 else
138 factories->GetTaskRunner()->DeleteSoon(FROM_HERE, decoder.release());
139 return decoder;
140 }
141
142 // static
143 void RTCVideoDecoder::Destroy(webrtc::VideoDecoder* decoder,
144 media::GpuVideoAcceleratorFactories* factories) {
145 factories->GetTaskRunner()->DeleteSoon(FROM_HERE, decoder);
146 }
147
148 int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings,
149 int32_t /*numberOfCores*/) {
150 DVLOG(2) << "InitDecode";
151 DCHECK_EQ(video_codec_type_, codecSettings->codecType);
152 if (codecSettings->codecType == webrtc::kVideoCodecVP8 &&
153 codecSettings->codecSpecific.VP8.feedbackModeOn) {
154 LOG(ERROR) << "Feedback mode not supported";
155 return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_ERROR);
156 }
157
158 base::AutoLock auto_lock(lock_);
159 if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) {
160 LOG(ERROR) << "VDA is not initialized. state=" << state_;
161 return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_UNINITIALIZED);
162 }
163
164 return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_OK);
165 }
166
167 int32_t RTCVideoDecoder::Decode(
168 const webrtc::EncodedImage& inputImage,
169 bool missingFrames,
170 const webrtc::RTPFragmentationHeader* /*fragmentation*/,
171 const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
172 int64_t /*renderTimeMs*/) {
173 DVLOG(3) << "Decode";
174
175 base::AutoLock auto_lock(lock_);
176
177 if (state_ == UNINITIALIZED || !decode_complete_callback_) {
178 LOG(ERROR) << "The decoder has not initialized.";
179 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
180 }
181
182 if (state_ == DECODE_ERROR) {
183 LOG(ERROR) << "Decoding error occurred.";
184 // Try resetting the session until |kNumVDAErrorsBeforeSWFallback| errors occur.
185 // Check if a SW H264 implementation is available before falling back.
186 if (vda_error_counter_ > kNumVDAErrorsBeforeSWFallback &&
187 (video_codec_type_ != webrtc::kVideoCodecH264 ||
188 webrtc::H264Decoder::IsSupported())) {
189 DLOG(ERROR) << vda_error_counter_
190 << " errors reported by VDA, falling back to software decode";
191 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
192 }
193 base::AutoUnlock auto_unlock(lock_);
194 Release();
195 return WEBRTC_VIDEO_CODEC_ERROR;
196 }
197
198 if (missingFrames || !inputImage._completeFrame) {
199 DLOG(ERROR) << "Missing or incomplete frames.";
200 // Unlike the SW decoder in libvpx, the HW decoder cannot handle broken frames.
201 // Return an error to request a key frame.
202 return WEBRTC_VIDEO_CODEC_ERROR;
203 }
204
205 // Most platforms' VDA implementations support mid-stream resolution change
206 // internally. Platforms whose VDAs fail to support mid-stream resolution
207 // change gracefully need to have their clients cover for them, and we do that
208 // here.
209 #ifdef ANDROID
210 const bool kVDACanHandleMidstreamResize = false;
211 #else
212 const bool kVDACanHandleMidstreamResize = true;
213 #endif
214
215 bool need_to_reset_for_midstream_resize = false;
216 if (inputImage._frameType == webrtc::kVideoFrameKey) {
217 const gfx::Size new_frame_size(inputImage._encodedWidth,
218 inputImage._encodedHeight);
219 DVLOG(2) << "Got key frame. size=" << new_frame_size.ToString();
220
221 if (new_frame_size.width() > max_resolution_.width() ||
222 new_frame_size.width() < min_resolution_.width() ||
223 new_frame_size.height() > max_resolution_.height() ||
224 new_frame_size.height() < min_resolution_.height()) {
225 DVLOG(1) << "Resolution unsupported, falling back to software decode";
226 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
227 }
228
229 gfx::Size prev_frame_size = frame_size_;
230 frame_size_ = new_frame_size;
231 if (!kVDACanHandleMidstreamResize && !prev_frame_size.IsEmpty() &&
232 prev_frame_size != frame_size_) {
233 need_to_reset_for_midstream_resize = true;
234 }
235 } else if (IsFirstBufferAfterReset(next_bitstream_buffer_id_,
236 reset_bitstream_buffer_id_)) {
237 // TODO(wuchengli): VDA should handle it. Remove this when
238 // http://crosbug.com/p/21913 is fixed.
239 DVLOG(1) << "The first frame should be a key frame. Drop this.";
240 return WEBRTC_VIDEO_CODEC_ERROR;
241 }
242
243 // Create buffer metadata.
244 BufferData buffer_data(next_bitstream_buffer_id_,
245 inputImage._timeStamp,
246 inputImage._length,
247 gfx::Rect(frame_size_));
248 // Mask against 30 bits to avoid (undefined) signed integer wraparound.
249 next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST;
250
251 // If a shared memory segment is available, there are no pending buffers, and
252 // this isn't a mid-stream resolution change, then send the buffer for decode
253 // immediately. Otherwise, save the buffer in the queue for later decode.
254 std::unique_ptr<base::SharedMemory> shm_buffer;
255 if (!need_to_reset_for_midstream_resize && pending_buffers_.empty())
256 shm_buffer = GetSHM_Locked(inputImage._length);
257 if (!shm_buffer) {
258 if (!SaveToPendingBuffers_Locked(inputImage, buffer_data)) {
259 // We have exceeded the pending buffer count; we are severely behind.
260 // Since we are returning ERROR, WebRTC will not be interested in the
261 // remaining buffers and will provide us with a new keyframe instead.
262 // Better to drop any pending buffers and start afresh to catch up faster.
263 DVLOG(1) << "Exceeded maximum pending buffer count, dropping";
264 ClearPendingBuffers();
265 return WEBRTC_VIDEO_CODEC_ERROR;
266 }
267
268 if (need_to_reset_for_midstream_resize) {
269 base::AutoUnlock auto_unlock(lock_);
270 Release();
271 }
272
273 TryResetVDAErrorCounter_Locked();
274 return WEBRTC_VIDEO_CODEC_OK;
275 }
276
277 SaveToDecodeBuffers_Locked(inputImage, std::move(shm_buffer), buffer_data);
278 factories_->GetTaskRunner()->PostTask(
279 FROM_HERE,
280 base::Bind(&RTCVideoDecoder::RequestBufferDecode,
281 weak_factory_.GetWeakPtr()));
282 TryResetVDAErrorCounter_Locked();
283 return WEBRTC_VIDEO_CODEC_OK;
284 }
285
286 int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback(
287 webrtc::DecodedImageCallback* callback) {
288 DVLOG(2) << "RegisterDecodeCompleteCallback";
289 DCHECK(callback);
290 base::AutoLock auto_lock(lock_);
291 decode_complete_callback_ = callback;
292 return WEBRTC_VIDEO_CODEC_OK;
293 }
294
295 int32_t RTCVideoDecoder::Release() {
296 DVLOG(2) << "Release";
297 // Do not destroy the VDA, because WebRTC can call InitDecode() and start
298 // decoding again.
299 base::AutoLock auto_lock(lock_);
300 if (state_ == UNINITIALIZED) {
301 LOG(ERROR) << "Decoder not initialized.";
302 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
303 }
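// Remember the last bitstream buffer ID issued before this reset so that
// earlier buffers can be dropped later (see IsBufferAfterReset()).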
304 if (next_bitstream_buffer_id_ != 0)
305 reset_bitstream_buffer_id_ = next_bitstream_buffer_id_ - 1;
306 else
307 reset_bitstream_buffer_id_ = ID_LAST;
308 // If VDA is already resetting, no need to request the reset again.
309 if (state_ != RESETTING) {
310 state_ = RESETTING;
311 factories_->GetTaskRunner()->PostTask(
312 FROM_HERE,
313 base::Bind(&RTCVideoDecoder::ResetInternal,
314 weak_factory_.GetWeakPtr()));
315 }
316 return WEBRTC_VIDEO_CODEC_OK;
317 }
318
319 void RTCVideoDecoder::ProvidePictureBuffers(uint32_t count,
320 media::VideoPixelFormat format,
321 uint32_t textures_per_buffer,
322 const gfx::Size& size,
323 uint32_t texture_target) {
324 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
325 DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target;
326 DCHECK_EQ(1u, textures_per_buffer);
327
328 if (!vda_)
329 return;
330
331 std::vector<uint32_t> texture_ids;
332 std::vector<gpu::Mailbox> texture_mailboxes;
333 decoder_texture_target_ = texture_target;
334
335 if (format == media::PIXEL_FORMAT_UNKNOWN)
336 format = media::PIXEL_FORMAT_ARGB;
337
338 if ((pixel_format_ != media::PIXEL_FORMAT_UNKNOWN) &&
339 (format != pixel_format_)) {
340 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
341 return;
342 }
343
344 pixel_format_ = format;
345 if (!factories_->CreateTextures(count,
346 size,
347 &texture_ids,
348 &texture_mailboxes,
349 decoder_texture_target_)) {
350 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
351 return;
352 }
353 DCHECK_EQ(count, texture_ids.size());
354 DCHECK_EQ(count, texture_mailboxes.size());
355
356 std::vector<media::PictureBuffer> picture_buffers;
357 for (size_t i = 0; i < texture_ids.size(); ++i) {
358 media::PictureBuffer::TextureIds ids;
359 ids.push_back(texture_ids[i]);
360 std::vector<gpu::Mailbox> mailboxes;
361 mailboxes.push_back(texture_mailboxes[i]);
362
363 picture_buffers.push_back(
364 media::PictureBuffer(next_picture_buffer_id_++, size, ids, mailboxes));
365 bool inserted = assigned_picture_buffers_.insert(std::make_pair(
366 picture_buffers.back().id(), picture_buffers.back())).second;
367 DCHECK(inserted);
368 }
369 vda_->AssignPictureBuffers(picture_buffers);
370 }
371
372 void RTCVideoDecoder::DismissPictureBuffer(int32_t id) {
373 DVLOG(3) << "DismissPictureBuffer. id=" << id;
374 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
375
376 std::map<int32_t, media::PictureBuffer>::iterator it =
377 assigned_picture_buffers_.find(id);
378 if (it == assigned_picture_buffers_.end()) {
379 NOTREACHED() << "Missing picture buffer: " << id;
380 return;
381 }
382
383 media::PictureBuffer buffer_to_dismiss = it->second;
384 assigned_picture_buffers_.erase(it);
385
386 if (!picture_buffers_at_display_.count(id)) {
387 // We can delete the texture immediately as it's not being displayed.
388 factories_->DeleteTexture(buffer_to_dismiss.texture_ids()[0]);
389 return;
390 }
391 // The texture is still in display (tracked in |picture_buffers_at_display_|),
392 // so postpone its deletion until it is returned to us.
393 }
394
395 void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
396 DVLOG(3) << "PictureReady";
397 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
398
399 std::map<int32_t, media::PictureBuffer>::iterator it =
400 assigned_picture_buffers_.find(picture.picture_buffer_id());
401 if (it == assigned_picture_buffers_.end()) {
402 NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id();
403 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
404 return;
405 }
406
407 uint32_t timestamp = 0;
408 gfx::Rect visible_rect;
409 GetBufferData(picture.bitstream_buffer_id(), &timestamp, &visible_rect);
410 if (!picture.visible_rect().IsEmpty())
411 visible_rect = picture.visible_rect();
412
413 const media::PictureBuffer& pb = it->second;
414 if (visible_rect.IsEmpty() || !gfx::Rect(pb.size()).Contains(visible_rect)) {
415 LOG(ERROR) << "Invalid picture size: " << visible_rect.ToString()
416 << " should fit in " << pb.size().ToString();
417 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
418 return;
419 }
420
421 scoped_refptr<media::VideoFrame> frame =
422 CreateVideoFrame(picture, pb, timestamp, visible_rect, pixel_format_);
423 if (!frame) {
424 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
425 return;
426 }
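// Track this texture as being at display until ReleaseMailbox() returns it;
// DismissPictureBuffer() defers texture deletion for buffers in this map.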
427 bool inserted = picture_buffers_at_display_
428 .insert(std::make_pair(picture.picture_buffer_id(),
429 pb.texture_ids()[0]))
430 .second;
431 DCHECK(inserted);
432
433 // Create a WebRTC video frame.
434 webrtc::VideoFrame decoded_image(
435 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame), timestamp, 0,
436 webrtc::kVideoRotation_0);
437
438 // Invoke decode callback. WebRTC expects no callback after Release.
439 {
440 base::AutoLock auto_lock(lock_);
441 DCHECK(decode_complete_callback_);
442 if (IsBufferAfterReset(picture.bitstream_buffer_id(),
443 reset_bitstream_buffer_id_)) {
444 decode_complete_callback_->Decoded(decoded_image);
445 }
446 }
447 }
448
449 scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame(
450 const media::Picture& picture,
451 const media::PictureBuffer& pb,
452 uint32_t timestamp,
453 const gfx::Rect& visible_rect,
454 media::VideoPixelFormat pixel_format) {
455 DCHECK(decoder_texture_target_);
456 // Convert the timestamp from 90 kHz RTP units to a TimeDelta.
457 base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue(
458 base::checked_cast<uint64_t>(timestamp) * 1000 / 90);
459 // TODO(mcasas): The incoming data may actually be in a YUV format, but may be
460 // labelled as ARGB. This may or may not be reported by VDA, depending on
461 // whether it provides an implementation of VDA::GetOutputFormat().
462 // This prevents the compositor from messing with it, since the underlying
463 // platform can handle the former format natively. Make sure the
464 // correct format is used and everyone down the line understands it.
465 gpu::MailboxHolder holders[media::VideoFrame::kMaxPlanes] = {
466 gpu::MailboxHolder(pb.texture_mailbox(0), gpu::SyncToken(),
467 decoder_texture_target_)};
468 scoped_refptr<media::VideoFrame> frame =
469 media::VideoFrame::WrapNativeTextures(
470 pixel_format, holders,
471 media::BindToCurrentLoop(base::Bind(
472 &RTCVideoDecoder::ReleaseMailbox, weak_factory_.GetWeakPtr(),
473 factories_, picture.picture_buffer_id(), pb.texture_ids()[0])),
474 pb.size(), visible_rect, visible_rect.size(), timestamp_ms);
475 if (frame && picture.allow_overlay()) {
476 frame->metadata()->SetBoolean(media::VideoFrameMetadata::ALLOW_OVERLAY,
477 true);
478 }
479 return frame;
480 }
481
482 void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32_t id) {
483 DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id;
484 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
485
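// The VDA is done with this bitstream buffer: return its shared memory to
// the free pool and try to feed more queued buffers.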
486 std::map<int32_t, base::SharedMemory*>::iterator it =
487 bitstream_buffers_in_decoder_.find(id);
488 if (it == bitstream_buffers_in_decoder_.end()) {
489 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
490 NOTREACHED() << "Missing bitstream buffer: " << id;
491 return;
492 }
493
494 {
495 base::AutoLock auto_lock(lock_);
496 PutSHM_Locked(std::unique_ptr<base::SharedMemory>(it->second));
497 }
498 bitstream_buffers_in_decoder_.erase(it);
499
500 RequestBufferDecode();
501 }
502
503 void RTCVideoDecoder::NotifyFlushDone() {
504 DVLOG(3) << "NotifyFlushDone";
505 NOTREACHED() << "Unexpected flush done notification.";
506 }
507
508 void RTCVideoDecoder::NotifyResetDone() {
509 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
510 DVLOG(3) << "NotifyResetDone";
511
512 if (!vda_)
513 return;
514
515 input_buffer_data_.clear();
516 {
517 base::AutoLock auto_lock(lock_);
518 state_ = INITIALIZED;
519 }
520 // Send the pending buffers for decoding.
521 RequestBufferDecode();
522 }
523
524 void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) {
525 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
526 if (!vda_)
527 return;
528
529 LOG(ERROR) << "VDA Error:" << error;
530 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderError", error,
531 media::VideoDecodeAccelerator::ERROR_MAX + 1);
532 DestroyVDA();
533
534 base::AutoLock auto_lock(lock_);
535 state_ = DECODE_ERROR;
536 ++vda_error_counter_;
537 }
538
539 void RTCVideoDecoder::RequestBufferDecode() {
540 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
541 if (!vda_)
542 return;
543
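// First move any WebRTC frames that were waiting for shared memory into the
// decode queue, then feed the VDA until kMaxInFlightDecodes are outstanding.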
544 MovePendingBuffersToDecodeBuffers();
545
546 while (CanMoreDecodeWorkBeDone()) {
547 // Get a buffer and data from the queue.
548 std::unique_ptr<base::SharedMemory> shm_buffer;
549 BufferData buffer_data;
550 {
551 base::AutoLock auto_lock(lock_);
552 // Do not request decode if VDA is resetting.
553 if (decode_buffers_.empty() || state_ == RESETTING)
554 return;
555 shm_buffer.reset(decode_buffers_.front().first);
556 buffer_data = decode_buffers_.front().second;
557 decode_buffers_.pop_front();
558 // Drop buffers that were queued before Release() was called.
559 if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
560 reset_bitstream_buffer_id_)) {
561 PutSHM_Locked(std::move(shm_buffer));
562 continue;
563 }
564 }
565
566 // Create a BitstreamBuffer and send to VDA to decode.
567 media::BitstreamBuffer bitstream_buffer(
568 buffer_data.bitstream_buffer_id, shm_buffer->handle(), buffer_data.size,
569 0, base::TimeDelta::FromInternalValue(buffer_data.timestamp));
570 const bool inserted =
571 bitstream_buffers_in_decoder_.insert(
572 std::make_pair(bitstream_buffer.id(), shm_buffer.release())).second;
573 DCHECK(inserted) << "bitstream_buffer_id " << bitstream_buffer.id()
574 << " existed already in bitstream_buffers_in_decoder_";
575 RecordBufferData(buffer_data);
576 vda_->Decode(bitstream_buffer);
577 }
578 }
579
580 bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() {
581 return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes;
582 }
583
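// Bitstream buffer IDs live in [0, ID_LAST] and wrap around. A buffer counts
// as "after" the reset point if it is ahead of |id_reset| by a positive
// modular distance of less than ID_HALF.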
584 bool RTCVideoDecoder::IsBufferAfterReset(int32_t id_buffer, int32_t id_reset) {
585 if (id_reset == ID_INVALID)
586 return true;
587 int32_t diff = id_buffer - id_reset;
588 if (diff <= 0)
589 diff += ID_LAST + 1;
590 return diff < ID_HALF;
591 }
592
593 bool RTCVideoDecoder::IsFirstBufferAfterReset(int32_t id_buffer,
594 int32_t id_reset) {
595 if (id_reset == ID_INVALID)
596 return id_buffer == 0;
597 return id_buffer == ((id_reset + 1) & ID_LAST);
598 }
599
600 void RTCVideoDecoder::SaveToDecodeBuffers_Locked(
601 const webrtc::EncodedImage& input_image,
602 std::unique_ptr<base::SharedMemory> shm_buffer,
603 const BufferData& buffer_data) {
604 memcpy(shm_buffer->memory(), input_image._buffer, input_image._length);
605 std::pair<base::SharedMemory*, BufferData> buffer_pair =
606 std::make_pair(shm_buffer.release(), buffer_data);
607
608 // Store the buffer and the metadata to the queue.
609 decode_buffers_.push_back(buffer_pair);
610 }
611
612 bool RTCVideoDecoder::SaveToPendingBuffers_Locked(
613 const webrtc::EncodedImage& input_image,
614 const BufferData& buffer_data) {
615 DVLOG(2) << "SaveToPendingBuffers_Locked"
616 << ". pending_buffers size=" << pending_buffers_.size()
617 << ". decode_buffers_ size=" << decode_buffers_.size()
618 << ". available_shm size=" << available_shm_segments_.size();
619 // Too many buffers are queued; something has gone wrong.
620 if (pending_buffers_.size() >= kMaxNumOfPendingBuffers) {
621 LOG(WARNING) << "Too many pending buffers!";
622 return false;
623 }
624
625 // Clone the input image and save it to the queue.
626 uint8_t* buffer = new uint8_t[input_image._length];
627 // TODO(wuchengli): avoid memcpy. Extend webrtc::VideoDecoder::Decode()
628 // interface to take a non-const ptr to the frame and add a method to the
629 // frame that will swap buffers with another.
630 memcpy(buffer, input_image._buffer, input_image._length);
631 webrtc::EncodedImage encoded_image(
632 buffer, input_image._length, input_image._length);
633 std::pair<webrtc::EncodedImage, BufferData> buffer_pair =
634 std::make_pair(encoded_image, buffer_data);
635
636 pending_buffers_.push_back(buffer_pair);
637 return true;
638 }
639
640 void RTCVideoDecoder::MovePendingBuffersToDecodeBuffers() {
641 base::AutoLock auto_lock(lock_);
642 while (pending_buffers_.size() > 0) {
643 // Get a pending buffer from the queue.
644 const webrtc::EncodedImage& input_image = pending_buffers_.front().first;
645 const BufferData& buffer_data = pending_buffers_.front().second;
646
647 // Drop the frame if it was queued before Release() was called.
648 if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
649 reset_bitstream_buffer_id_)) {
650 delete[] input_image._buffer;
651 pending_buffers_.pop_front();
652 continue;
653 }
654 // Get shared memory and save it to decode buffers.
655 std::unique_ptr<base::SharedMemory> shm_buffer =
656 GetSHM_Locked(input_image._length);
657 if (!shm_buffer)
658 return;
659 SaveToDecodeBuffers_Locked(input_image, std::move(shm_buffer), buffer_data);
660 delete[] input_image._buffer;
661 pending_buffers_.pop_front();
662 }
663 }
664
665 void RTCVideoDecoder::ResetInternal() {
666 DVLOG(2) << __FUNCTION__;
667 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
668
669 if (vda_) {
670 vda_->Reset();
671 } else {
672 CreateVDA(vda_codec_profile_, nullptr);
673 if (vda_)
674 state_ = INITIALIZED;
675 }
676 }
677
678 // static
679 void RTCVideoDecoder::ReleaseMailbox(
680 base::WeakPtr<RTCVideoDecoder> decoder,
681 media::GpuVideoAcceleratorFactories* factories,
682 int64_t picture_buffer_id,
683 uint32_t texture_id,
684 const gpu::SyncToken& release_sync_token) {
685 DCHECK(factories->GetTaskRunner()->BelongsToCurrentThread());
686 factories->WaitSyncToken(release_sync_token);
687
688 if (decoder) {
689 decoder->ReusePictureBuffer(picture_buffer_id);
690 return;
691 }
692 // This is the last chance to delete the texture after display, because the
693 // RTCVideoDecoder has already been destroyed.
694 factories->DeleteTexture(texture_id);
695 }
696
697 void RTCVideoDecoder::ReusePictureBuffer(int64_t picture_buffer_id) {
698 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
699 DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id;
700
701 DCHECK(!picture_buffers_at_display_.empty());
702 PictureBufferTextureMap::iterator display_iterator =
703 picture_buffers_at_display_.find(picture_buffer_id);
704 DCHECK(display_iterator != picture_buffers_at_display_.end());
705 uint32_t texture_id = display_iterator->second;
706 picture_buffers_at_display_.erase(display_iterator);
707
708 if (!assigned_picture_buffers_.count(picture_buffer_id)) {
709 // This picture was dismissed while in display, so we postponed deletion.
710 factories_->DeleteTexture(texture_id);
711 return;
712 }
713
714 // DestroyVDA() might already have been called.
715 if (vda_)
716 vda_->ReusePictureBuffer(picture_buffer_id);
717 }
718
719 bool RTCVideoDecoder::IsProfileSupported(media::VideoCodecProfile profile) {
720 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
721 media::VideoDecodeAccelerator::Capabilities capabilities =
722 factories_->GetVideoDecodeAcceleratorCapabilities();
723
724 for (const auto& supported_profile : capabilities.supported_profiles) {
725 if (profile == supported_profile.profile) {
726 min_resolution_ = supported_profile.min_resolution;
727 max_resolution_ = supported_profile.max_resolution;
728 return true;
729 }
730 }
731
732 return false;
733 }
734
735 void RTCVideoDecoder::CreateVDA(media::VideoCodecProfile profile,
736 base::WaitableEvent* waiter) {
737 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
738
739 if (!IsProfileSupported(profile)) {
740 DVLOG(1) << "Unsupported profile " << profile;
741 } else {
742 vda_ = factories_->CreateVideoDecodeAccelerator();
743
744 media::VideoDecodeAccelerator::Config config(profile);
745 if (vda_ && !vda_->Initialize(config, this))
746 vda_.release()->Destroy();
747 vda_codec_profile_ = profile;
748 }
749
750 if (waiter)
751 waiter->Signal();
752 }
753
754 void RTCVideoDecoder::DestroyTextures() {
755 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
756
757 // Not destroying PictureBuffers in |picture_buffers_at_display_| yet, since
758 // their textures may still be in use by the user of this RTCVideoDecoder.
759 for (const auto& picture_buffer_at_display : picture_buffers_at_display_)
760 assigned_picture_buffers_.erase(picture_buffer_at_display.first);
761
762 for (const auto& assigned_picture_buffer : assigned_picture_buffers_)
763 factories_->DeleteTexture(assigned_picture_buffer.second.texture_ids()[0]);
764
765 assigned_picture_buffers_.clear();
766 }
767
768 void RTCVideoDecoder::DestroyVDA() {
769 DVLOG(2) << "DestroyVDA";
770 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
771 if (vda_)
772 vda_.release()->Destroy();
773 DestroyTextures();
774
775 base::AutoLock auto_lock(lock_);
776
777 // Put the buffers back in case we restart the decoder.
778 for (const auto& buffer : bitstream_buffers_in_decoder_)
779 PutSHM_Locked(std::unique_ptr<base::SharedMemory>(buffer.second));
780 bitstream_buffers_in_decoder_.clear();
781
782 state_ = UNINITIALIZED;
783 }
784
785 std::unique_ptr<base::SharedMemory> RTCVideoDecoder::GetSHM_Locked(
786 size_t min_size) {
787 // Reuse a SHM if possible.
788 if (!available_shm_segments_.empty() &&
789 available_shm_segments_.back()->mapped_size() >= min_size) {
790 std::unique_ptr<base::SharedMemory> buffer(available_shm_segments_.back());
791 available_shm_segments_.pop_back();
792 return buffer;
793 }
794
795 if (available_shm_segments_.size() != num_shm_buffers_) {
796 // Either available_shm_segments_ is empty (and we already have some SHM
797 // buffers allocated), or the size of available segments is not large
798 // enough. In the former case we need to wait for buffers to be returned,
799 // in the latter we need to wait for all buffers to be returned to drop
800 // them and reallocate with a new size.
801 return NULL;
802 }
803
804 if (num_shm_buffers_ != 0) {
805 STLDeleteElements(&available_shm_segments_);
806 num_shm_buffers_ = 0;
807 }
808
809 // Create buffers twice as large as required, to avoid frequent reallocation.
810 factories_->GetTaskRunner()->PostTask(
811 FROM_HERE,
812 base::Bind(&RTCVideoDecoder::CreateSHM, weak_factory_.GetWeakPtr(),
813 kNumSharedMemorySegments, min_size * 2));
814
815 // We'll be called again after the shared memory is created.
816 return NULL;
817 }
818
819 void RTCVideoDecoder::PutSHM_Locked(
820 std::unique_ptr<base::SharedMemory> shm_buffer) {
821 lock_.AssertAcquired();
822 available_shm_segments_.push_back(shm_buffer.release());
823 }
824
825 void RTCVideoDecoder::CreateSHM(size_t count, size_t size) {
826 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
827 DVLOG(2) << "CreateSHM. count=" << count << ", size=" << size;
828
829 for (size_t i = 0; i < count; i++) {
830 std::unique_ptr<base::SharedMemory> shm =
831 factories_->CreateSharedMemory(size);
832 if (!shm) {
833 LOG(ERROR) << "Failed allocating shared memory of size=" << size;
834 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
835 return;
836 }
837
838 base::AutoLock auto_lock(lock_);
839 PutSHM_Locked(std::move(shm));
840 ++num_shm_buffers_;
841 }
842
843 // Kick off the decoding.
844 RequestBufferDecode();
845 }
846
847 void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) {
848 input_buffer_data_.push_front(buffer_data);
849 // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but
850 // that's too small for some pathological B-frame test videos. The cost of
851 // using too-high a value is low (192 bits per extra slot).
852 static const size_t kMaxInputBufferDataSize = 128;
853 // Pop from the back of the list, because that's the oldest data and the
854 // least likely to be useful in the future.
855 if (input_buffer_data_.size() > kMaxInputBufferDataSize)
856 input_buffer_data_.pop_back();
857 }
858
859 void RTCVideoDecoder::GetBufferData(int32_t bitstream_buffer_id,
860 uint32_t* timestamp,
861 gfx::Rect* visible_rect) {
862 for (const auto& buffer_data : input_buffer_data_) {
863 if (buffer_data.bitstream_buffer_id != bitstream_buffer_id)
864 continue;
865 *timestamp = buffer_data.timestamp;
866 *visible_rect = buffer_data.visible_rect;
867 return;
868 }
869 NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id;
870 }
871
872 int32_t RTCVideoDecoder::RecordInitDecodeUMA(int32_t status) {
873 // Logging a boolean is enough to know whether HW decoding has been used.
874 // Also, InitDecode() rarely returns an error, so an enum is not used here.
875 bool sample = (status == WEBRTC_VIDEO_CODEC_OK);
876 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoDecoderInitDecodeSuccess", sample);
877 return status;
878 }
879
880 void RTCVideoDecoder::DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent()
881 const {
882 DCHECK(factories_->GetTaskRunner()->BelongsToCurrentThread());
883 }
884
885 void RTCVideoDecoder::ClearPendingBuffers() {
886 // Delete WebRTC input buffers.
887 for (const auto& pending_buffer : pending_buffers_)
888 delete[] pending_buffer.first._buffer;
889 pending_buffers_.clear();
890 }
891
892 void RTCVideoDecoder::TryResetVDAErrorCounter_Locked() {
893 lock_.AssertAcquired();
894
895 if (vda_error_counter_ == 0)
896 return;
897 vda_error_counter_ = 0;
898 }
899
900 } // namespace content
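
For orientation, here is a caller-side sketch (not part of the file under review) of the lifecycle this class expects. It assumes the embedder supplies |factories| (a media::GpuVideoAcceleratorFactories*) and |callback| (a webrtc::DecodedImageCallback*), both of which come from outside this file.

  // Illustrative sketch only; |factories| and |callback| are assumed to exist.
  std::unique_ptr<content::RTCVideoDecoder> decoder =
      content::RTCVideoDecoder::Create(webrtc::kVideoCodecVP8, factories);
  if (decoder) {
    webrtc::VideoCodec settings = {};
    settings.codecType = webrtc::kVideoCodecVP8;
    decoder->InitDecode(&settings, 1 /* numberOfCores, ignored */);
    decoder->RegisterDecodeCompleteCallback(callback);
    // ... feed webrtc::EncodedImage frames through Decode() ...
    decoder->Release();
    content::RTCVideoDecoder::Destroy(decoder.release(), factories);
  }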