Chromium Code Reviews

Side by Side Diff: content/renderer/media/gpu/rtc_video_decoder.cc

Issue 2363303002: [WIP] Proxy RtcVideoDecoder calls to a media::VideoDecoder.
Patch Set: Now working with remote ffmpeg decoder. Created 4 years, 2 months ago
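
The patch replaces the direct VideoDecodeAccelerator path with a media::VideoDecoder obtained from a factory callback: RTCVideoDecoder::Create() now takes a CreateVideoDecoderCB and a decoder task runner instead of GpuVideoAcceleratorFactories, posts InitializeDecoder() to that runner, copies each webrtc::EncodedImage into a media::DecoderBuffer, and forwards it to the wrapped decoder, returning frames through OnFrameReady(). A minimal caller-side sketch of that wiring follows; the call site, the CreateRemoteVideoDecoder() helper, and the exact CreateVideoDecoderCB signature are assumptions for illustration, not part of this change.

    #include <memory>

    #include "base/bind.h"
    #include "base/memory/ref_counted.h"
    #include "base/single_thread_task_runner.h"
    #include "content/renderer/media/gpu/rtc_video_decoder.h"
    #include "media/base/video_decoder.h"

    // Placeholder for whatever builds the media::VideoDecoder (for example the
    // remote ffmpeg decoder this patch set mentions). Assumed to return a
    // std::unique_ptr<media::VideoDecoder> and to be runnable on the decoder
    // task runner.
    std::unique_ptr<media::VideoDecoder> CreateRemoteVideoDecoder();

    std::unique_ptr<webrtc::VideoDecoder> CreateProxiedDecoder(
        const scoped_refptr<base::SingleThreadTaskRunner>& decoder_task_runner) {
      // Create() posts RTCVideoDecoder::InitializeDecoder() to
      // |decoder_task_runner|; the callback runs there to build the
      // media::VideoDecoder that every Decode() call is proxied to.
      return content::RTCVideoDecoder::Create(
          webrtc::kVideoCodecVP8, base::Bind(&CreateRemoteVideoDecoder),
          decoder_task_runner);
    }
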
1 // Copyright 2013 The Chromium Authors. All rights reserved. 1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/gpu/rtc_video_decoder.h" 5 #include "content/renderer/media/gpu/rtc_video_decoder.h"
6 6
7 #include <utility> 7 #include <utility>
8 8
9 #include "base/bind.h" 9 #include "base/bind.h"
10 #include "base/logging.h" 10 #include "base/logging.h"
11 #include "base/memory/ref_counted.h" 11 #include "base/memory/ref_counted.h"
12 #include "base/metrics/histogram_macros.h" 12 #include "base/metrics/histogram_macros.h"
13 #include "base/numerics/safe_conversions.h" 13 #include "base/numerics/safe_conversions.h"
14 #include "base/synchronization/waitable_event.h" 14 #include "base/synchronization/waitable_event.h"
15 #include "base/task_runner_util.h" 15 #include "base/task_runner_util.h"
16 #include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h" 16 #include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h"
17 #include "gpu/command_buffer/common/mailbox_holder.h" 17 #include "gpu/command_buffer/common/mailbox_holder.h"
18 #include "media/base/bind_to_current_loop.h" 18 #include "media/base/bind_to_current_loop.h"
19 #include "media/base/decoder_buffer.h"
20 #include "media/base/encryption_scheme.h"
21 #include "media/base/video_decoder.h"
19 #include "media/renderers/gpu_video_accelerator_factories.h" 22 #include "media/renderers/gpu_video_accelerator_factories.h"
20 #include "third_party/skia/include/core/SkBitmap.h" 23 #include "third_party/skia/include/core/SkBitmap.h"
21 #include "third_party/webrtc/base/bind.h" 24 #include "third_party/webrtc/base/bind.h"
22 #include "third_party/webrtc/base/refcount.h" 25 #include "third_party/webrtc/base/refcount.h"
23 #include "third_party/webrtc/modules/video_coding/codecs/h264/include/h264.h" 26 #include "third_party/webrtc/modules/video_coding/codecs/h264/include/h264.h"
24 #include "third_party/webrtc/video_frame.h" 27 #include "third_party/webrtc/video_frame.h"
25 28
26 #if defined(OS_WIN) 29 #if defined(OS_WIN)
27 #include "base/command_line.h" 30 #include "base/command_line.h"
28 #include "base/win/windows_version.h" 31 #include "base/win/windows_version.h"
29 #include "content/public/common/content_switches.h" 32 #include "content/public/common/content_switches.h"
30 #endif // defined(OS_WIN) 33 #endif // defined(OS_WIN)
31 34
32 namespace content { 35 namespace content {
33 36
34 const int32_t RTCVideoDecoder::ID_LAST = 0x3FFFFFFF; 37 const int32_t RTCVideoDecoder::ID_LAST = 0x3FFFFFFF;
35 const int32_t RTCVideoDecoder::ID_HALF = 0x20000000; 38 const int32_t RTCVideoDecoder::ID_HALF = 0x20000000;
36 const int32_t RTCVideoDecoder::ID_INVALID = -1; 39 const int32_t RTCVideoDecoder::ID_INVALID = -1;
37 40
38 // Number of consecutive frames that can be lost due to a VDA error before 41 // Number of consecutive frames that can be lost due to a VDA error before
39 // falling back to SW implementation. 42 // falling back to SW implementation.
40 const uint32_t kNumVDAErrorsBeforeSWFallback = 5; 43 const uint32_t kNumDecoderErrorsBeforeSWFallback = 5;
41
42 // Maximum number of concurrent VDA::Decode() operations RVD will maintain.
43 // Higher values allow better pipelining in the GPU, but also require more
44 // resources.
45 static const size_t kMaxInFlightDecodes = 8;
46
47 // Number of allocated shared memory segments.
48 static const size_t kNumSharedMemorySegments = 16;
49 44
50 // Maximum number of pending WebRTC buffers that are waiting for shared memory. 45 // Maximum number of pending WebRTC buffers that are waiting for shared memory.
51 static const size_t kMaxNumOfPendingBuffers = 8; 46 static const size_t kMaxNumOfPendingBuffers = 8;
52 47
53 RTCVideoDecoder::BufferData::BufferData(int32_t bitstream_buffer_id, 48 scoped_refptr<media::DecoderBuffer> CreateDecoderBuffer(
54 uint32_t timestamp, 49 const webrtc::EncodedImage& encoded_image) {
55 size_t size, 50 auto decoder_buffer = media::DecoderBuffer::CopyFrom(encoded_image._buffer,
56 const gfx::Rect& visible_rect) 51 encoded_image._length);
57 : bitstream_buffer_id(bitstream_buffer_id), 52 decoder_buffer->set_timestamp(
58 timestamp(timestamp), 53 base::TimeDelta::FromInternalValue(encoded_image._timeStamp));
59 size(size), 54 return decoder_buffer;
60 visible_rect(visible_rect) {} 55 }
61 56
62 RTCVideoDecoder::BufferData::BufferData() {} 57 RTCVideoDecoder::RTCVideoDecoder(
63 58 webrtc::VideoCodecType type,
64 RTCVideoDecoder::BufferData::~BufferData() {} 59 const CreateVideoDecoderCB& create_video_decoder_cb,
65 60 const scoped_refptr<base::SingleThreadTaskRunner>& decoder_task_runner)
66 RTCVideoDecoder::RTCVideoDecoder(webrtc::VideoCodecType type, 61 : decoder_error_counter_(0),
67 media::GpuVideoAcceleratorFactories* factories)
68 : vda_error_counter_(0),
69 video_codec_type_(type), 62 video_codec_type_(type),
70 factories_(factories), 63 create_video_decoder_cb_(create_video_decoder_cb),
71 decoder_texture_target_(0), 64 decoder_task_runner_(decoder_task_runner),
72 pixel_format_(media::PIXEL_FORMAT_UNKNOWN), 65 pixel_format_(media::PIXEL_FORMAT_UNKNOWN),
73 next_picture_buffer_id_(0),
74 state_(UNINITIALIZED), 66 state_(UNINITIALIZED),
75 decode_complete_callback_(nullptr), 67 decode_complete_callback_(nullptr),
76 num_shm_buffers_(0), 68 next_decoder_buffer_id_(0),
77 next_bitstream_buffer_id_(0), 69 reset_decoder_buffer_id_(ID_INVALID),
78 reset_bitstream_buffer_id_(ID_INVALID),
79 weak_factory_(this) { 70 weak_factory_(this) {
80 DCHECK(!factories_->GetTaskRunner()->BelongsToCurrentThread()); 71 DCHECK(!decoder_task_runner_->BelongsToCurrentThread());
81 } 72 }
82 73
83 RTCVideoDecoder::~RTCVideoDecoder() { 74 RTCVideoDecoder::~RTCVideoDecoder() {
84 DVLOG(2) << "~RTCVideoDecoder"; 75 DVLOG(2) << "~RTCVideoDecoder";
85 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent(); 76 DCheckDecoderTaskRunnerIsCurrent();
86 DestroyVDA();
87 77
88 // Delete all shared memories. 78 // Grab the lock so that the function doesn't DCHECK on us.
89 ClearPendingBuffers(); 79 base::AutoLock auto_lock(lock_);
80 ClearPendingBuffers_Locked();
90 } 81 }
91 82
92 // static 83 // static
93 std::unique_ptr<RTCVideoDecoder> RTCVideoDecoder::Create( 84 std::unique_ptr<RTCVideoDecoder> RTCVideoDecoder::Create(
94 webrtc::VideoCodecType type, 85 webrtc::VideoCodecType type,
95 media::GpuVideoAcceleratorFactories* factories) { 86 const CreateVideoDecoderCB& create_video_decoder_cb,
87 const scoped_refptr<base::SingleThreadTaskRunner>& decoder_task_runner) {
88 VLOG(0) << __func__;
96 std::unique_ptr<RTCVideoDecoder> decoder; 89 std::unique_ptr<RTCVideoDecoder> decoder;
97 // See https://bugs.chromium.org/p/webrtc/issues/detail?id=5717. 90 // See https://bugs.chromium.org/p/webrtc/issues/detail?id=5717.
98 #if defined(OS_WIN) 91 #if defined(OS_WIN)
99 if (!base::CommandLine::ForCurrentProcess()->HasSwitch( 92 if (!base::CommandLine::ForCurrentProcess()->HasSwitch(
100 switches::kEnableWin7WebRtcHWH264Decoding) && 93 switches::kEnableWin7WebRtcHWH264Decoding) &&
101 type == webrtc::kVideoCodecH264 && 94 type == webrtc::kVideoCodecH264 &&
102 base::win::GetVersion() == base::win::VERSION_WIN7) { 95 base::win::GetVersion() == base::win::VERSION_WIN7) {
103 DLOG(ERROR) << "H264 HW decoding on Win7 is not supported."; 96 DLOG(ERROR) << "H264 HW decoding on Win7 is not supported.";
104 return decoder; 97 return decoder;
105 } 98 }
106 #endif // defined(OS_WIN) 99 #endif // defined(OS_WIN)
100
107 // Convert WebRTC codec type to media codec profile. 101 // Convert WebRTC codec type to media codec profile.
102 // TODO(slan): This should produce a VideoDecoderConfig object.
108 media::VideoCodecProfile profile; 103 media::VideoCodecProfile profile;
104 media::VideoCodec codec;
109 switch (type) { 105 switch (type) {
110 case webrtc::kVideoCodecVP8: 106 case webrtc::kVideoCodecVP8:
111 profile = media::VP8PROFILE_ANY; 107 profile = media::VP8PROFILE_ANY;
108 codec = media::kCodecVP8;
112 break; 109 break;
113 case webrtc::kVideoCodecH264: 110 case webrtc::kVideoCodecH264:
114 profile = media::H264PROFILE_MAIN; 111 profile = media::H264PROFILE_MAIN;
112 codec = media::kCodecH264;
115 break; 113 break;
116 default: 114 default:
117 DVLOG(2) << "Video codec not supported:" << type; 115 VLOG(0) << "Video codec not supported:" << type;
118 return decoder; 116 return decoder;
119 } 117 }
120 118
121 base::WaitableEvent waiter(base::WaitableEvent::ResetPolicy::MANUAL, 119 base::WaitableEvent waiter(base::WaitableEvent::ResetPolicy::MANUAL,
122 base::WaitableEvent::InitialState::NOT_SIGNALED); 120 base::WaitableEvent::InitialState::NOT_SIGNALED);
123 decoder.reset(new RTCVideoDecoder(type, factories)); 121 decoder.reset(
124 decoder->factories_->GetTaskRunner()->PostTask( 122 new RTCVideoDecoder(type, create_video_decoder_cb, decoder_task_runner));
123 decoder_task_runner->PostTask(
125 FROM_HERE, 124 FROM_HERE,
126 base::Bind(&RTCVideoDecoder::CreateVDA, 125 base::Bind(&RTCVideoDecoder::InitializeDecoder,
127 base::Unretained(decoder.get()), 126 base::Unretained(decoder.get()), profile, codec, &waiter));
128 profile, 127 VLOG(0) << "TaskPosted, thread locked.";
129 &waiter)); 128 DCHECK(!decoder_task_runner->BelongsToCurrentThread());
130 waiter.Wait(); 129 // waiter.Wait();
131 // |decoder->vda_| is nullptr if the codec is not supported. 130 if (true) { // decoder->decoder_) {
132 if (decoder->vda_) 131 VLOG(0) << "Decoder is initialized!";
133 decoder->state_ = INITIALIZED; 132 decoder->state_ = INITIALIZED;
134 else 133 } else {
135 factories->GetTaskRunner()->DeleteSoon(FROM_HERE, decoder.release()); 134 VLOG(0) << "Decoder not initialized!";
135 decoder_task_runner->DeleteSoon(FROM_HERE, decoder.release());
136 }
136 return decoder; 137 return decoder;
137 } 138 }
138 139
139 // static 140 // static
140 void RTCVideoDecoder::Destroy(webrtc::VideoDecoder* decoder, 141 void RTCVideoDecoder::Destroy(
141 media::GpuVideoAcceleratorFactories* factories) { 142 webrtc::VideoDecoder* decoder,
142 factories->GetTaskRunner()->DeleteSoon(FROM_HERE, decoder); 143 const scoped_refptr<base::SingleThreadTaskRunner>& decoder_task_runner) {
144 decoder_task_runner->DeleteSoon(FROM_HERE, decoder);
143 } 145 }
144 146
145 int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings, 147 int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings,
146 int32_t /*numberOfCores*/) { 148 int32_t /*numberOfCores*/) {
147 DVLOG(2) << "InitDecode"; 149 VLOG(0) << __func__;
148 DCHECK_EQ(video_codec_type_, codecSettings->codecType); 150 DCHECK_EQ(video_codec_type_, codecSettings->codecType);
149 if (codecSettings->codecType == webrtc::kVideoCodecVP8 && 151 if (codecSettings->codecType == webrtc::kVideoCodecVP8 &&
150 codecSettings->codecSpecific.VP8.feedbackModeOn) { 152 codecSettings->codecSpecific.VP8.feedbackModeOn) {
151 LOG(ERROR) << "Feedback mode not supported"; 153 LOG(ERROR) << "Feedback mode not supported";
152 return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_ERROR); 154 return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_ERROR);
153 } 155 }
154 156
155 base::AutoLock auto_lock(lock_); 157 base::AutoLock auto_lock(lock_);
156 if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) { 158 if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) {
157 LOG(ERROR) << "VDA is not initialized. state=" << state_; 159 LOG(ERROR) << "VDA is not initialized. state=" << state_;
158 return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_UNINITIALIZED); 160 return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_UNINITIALIZED);
159 } 161 }
160 162
161 return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_OK); 163 return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_OK);
162 } 164 }
163 165
164 int32_t RTCVideoDecoder::Decode( 166 int32_t RTCVideoDecoder::Decode(
165 const webrtc::EncodedImage& inputImage, 167 const webrtc::EncodedImage& inputImage,
166 bool missingFrames, 168 bool missingFrames,
167 const webrtc::RTPFragmentationHeader* /*fragmentation*/, 169 const webrtc::RTPFragmentationHeader* /*fragmentation*/,
168 const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/, 170 const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
169 int64_t /*renderTimeMs*/) { 171 int64_t /*renderTimeMs*/) {
170 DVLOG(3) << "Decode"; 172 VLOG(0) << __func__ << " " << inputImage._timeStamp;
171 173
174 // NOTE(slan): WTF, this whole method is locked???
172 base::AutoLock auto_lock(lock_); 175 base::AutoLock auto_lock(lock_);
173 176
174 if (state_ == UNINITIALIZED || !decode_complete_callback_) { 177 if (state_ == UNINITIALIZED || !decode_complete_callback_) {
175 LOG(ERROR) << "The decoder has not initialized."; 178 LOG(ERROR) << "The decoder has not initialized.";
176 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; 179 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
177 } 180 }
178 181
179 if (state_ == DECODE_ERROR) { 182 if (state_ == DECODE_ERROR) {
180 LOG(ERROR) << "Decoding error occurred."; 183 LOG(ERROR) << "Decoding error occurred.";
181 // Try resetting the session up to |kNumVDAErrorsHandled| times. 184 // Try resetting the session up to |kNumVDAErrorsHandled| times.
182 // Check if SW H264 implementation is available before falling back. 185 // Check if SW H264 implementation is available before falling back.
183 if (vda_error_counter_ > kNumVDAErrorsBeforeSWFallback && 186 if (decoder_error_counter_ > kNumDecoderErrorsBeforeSWFallback &&
184 (video_codec_type_ != webrtc::kVideoCodecH264 || 187 (video_codec_type_ != webrtc::kVideoCodecH264 ||
185 webrtc::H264Decoder::IsSupported())) { 188 webrtc::H264Decoder::IsSupported())) {
186 DLOG(ERROR) << vda_error_counter_ 189 DLOG(ERROR) << decoder_error_counter_
187 << " errors reported by VDA, falling back to software decode"; 190 << " errors reported by VDA, falling back to software decode";
188 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; 191 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
189 } 192 }
190 base::AutoUnlock auto_unlock(lock_); 193 base::AutoUnlock auto_unlock(lock_);
191 Release(); 194 Release();
192 return WEBRTC_VIDEO_CODEC_ERROR; 195 return WEBRTC_VIDEO_CODEC_ERROR;
193 } 196 }
194 197
195 if (missingFrames || !inputImage._completeFrame) { 198 if (missingFrames || !inputImage._completeFrame) {
196 DLOG(ERROR) << "Missing or incomplete frames."; 199 LOG(ERROR) << "Missing or incomplete frames.";
197 // Unlike the SW decoder in libvpx, hw decoder cannot handle broken frames. 200 // Unlike the SW decoder in libvpx, hw decoder cannot handle broken frames.
198 // Return an error to request a key frame. 201 // Return an error to request a key frame.
199 return WEBRTC_VIDEO_CODEC_ERROR; 202 return WEBRTC_VIDEO_CODEC_ERROR;
200 } 203 }
201 204
202 // Most platforms' VDA implementations support mid-stream resolution change 205 // Most platforms' VDA implementations support mid-stream resolution change
203 // internally. Platforms whose VDAs fail to support mid-stream resolution 206 // internally. Platforms whose VDAs fail to support mid-stream resolution
204 // change gracefully need to have their clients cover for them, and we do that 207 // change gracefully need to have their clients cover for them, and we do that
205 // here. 208 // here.
206 #ifdef ANDROID 209 #ifdef ANDROID
207 const bool kVDACanHandleMidstreamResize = false; 210 const bool kVDACanHandleMidstreamResize = false;
208 #else 211 #else
209 const bool kVDACanHandleMidstreamResize = true; 212 const bool kVDACanHandleMidstreamResize = true;
210 #endif 213 #endif
211 214
212 bool need_to_reset_for_midstream_resize = false; 215 bool need_to_reset_for_midstream_resize = false;
213 if (inputImage._frameType == webrtc::kVideoFrameKey) { 216 if (inputImage._frameType == webrtc::kVideoFrameKey) {
214 const gfx::Size new_frame_size(inputImage._encodedWidth, 217 const gfx::Size new_frame_size(inputImage._encodedWidth,
215 inputImage._encodedHeight); 218 inputImage._encodedHeight);
216 DVLOG(2) << "Got key frame. size=" << new_frame_size.ToString(); 219 VLOG(0) << "Got key frame. size=" << new_frame_size.ToString();
217 220
218 if (new_frame_size.width() > max_resolution_.width() || 221 if (new_frame_size.width() > max_resolution_.width() ||
219 new_frame_size.width() < min_resolution_.width() || 222 new_frame_size.width() < min_resolution_.width() ||
220 new_frame_size.height() > max_resolution_.height() || 223 new_frame_size.height() > max_resolution_.height() ||
221 new_frame_size.height() < min_resolution_.height()) { 224 new_frame_size.height() < min_resolution_.height()) {
222 DVLOG(1) << "Resolution unsupported, falling back to software decode"; 225 VLOG(0) << "Resolution unsupported, falling back to software decode";
223 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; 226 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
224 } 227 }
225 228
226 gfx::Size prev_frame_size = frame_size_; 229 gfx::Size prev_frame_size = frame_size_;
227 frame_size_ = new_frame_size; 230 frame_size_ = new_frame_size;
228 if (!kVDACanHandleMidstreamResize && !prev_frame_size.IsEmpty() && 231 if (!kVDACanHandleMidstreamResize && !prev_frame_size.IsEmpty() &&
229 prev_frame_size != frame_size_) { 232 prev_frame_size != frame_size_) {
230 need_to_reset_for_midstream_resize = true; 233 need_to_reset_for_midstream_resize = true;
231 } 234 }
232 } else if (IsFirstBufferAfterReset(next_bitstream_buffer_id_, 235 } else if (IsFirstBufferAfterReset(next_decoder_buffer_id_,
233 reset_bitstream_buffer_id_)) { 236 reset_decoder_buffer_id_)) {
234 // TODO(wuchengli): VDA should handle it. Remove this when 237 // TODO(wuchengli): VDA should handle it. Remove this when
235 // http://crosbug.com/p/21913 is fixed. 238 // http://crosbug.com/p/21913 is fixed.
236 239
237 // If we are in an error condition, increase the counter. 240 // If we are in an error condition, increase the counter.
238 vda_error_counter_ += vda_error_counter_ ? 1 : 0; 241 decoder_error_counter_ += decoder_error_counter_ ? 1 : 0;
239 242
240 DVLOG(1) << "The first frame should be a key frame. Drop this."; 243 VLOG(0) << "The first frame should be a key frame. Drop this.";
241 return WEBRTC_VIDEO_CODEC_ERROR; 244 return WEBRTC_VIDEO_CODEC_ERROR;
242 } 245 }
243 246
244 // Create buffer metadata. 247 int32_t decoder_buffer_id = next_decoder_buffer_id_;
245 BufferData buffer_data(next_bitstream_buffer_id_, 248 auto decoder_buffer = CreateDecoderBuffer(inputImage);
246 inputImage._timeStamp, 249
247 inputImage._length,
248 gfx::Rect(frame_size_));
249 // Mask against 30 bits, to avoid (undefined) wraparound on signed integer. 250 // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
250 next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST; 251 next_decoder_buffer_id_ = (next_decoder_buffer_id_ + 1) & ID_LAST;
251 252
252 // If a shared memory segment is available, there are no pending buffers, and 253 // Try to enqueue the image to be decoded. These frames will be consumed on
253 // this isn't a mid-stream resolution change, then send the buffer for decode 254 // the decoder thread. It would be ideal to post this to the decoder thread to
254 // immediately. Otherwise, save the buffer in the queue for later decode. 255 // avoid the lock, but we need to return an error from this function if
255 std::unique_ptr<base::SharedMemory> shm_buffer; 256 // |pending_buffers_| is full. So use a lock instead.
256 if (!need_to_reset_for_midstream_resize && pending_buffers_.empty()) 257 if (!SaveToPendingBuffers_Locked(decoder_buffer_id, decoder_buffer)) {
257 shm_buffer = GetSHM_Locked(inputImage._length); 258 // We have exceeded the pending buffers count, we are severely behind.
258 if (!shm_buffer) { 259 // Since we are returning ERROR, WebRTC will not be interested in the
259 if (!SaveToPendingBuffers_Locked(inputImage, buffer_data)) { 260 // remaining buffers, and will provide us with a new keyframe instead.
260 // We have exceeded the pending buffers count, we are severely behind. 261 // Better to drop any pending buffers and start afresh to catch up faster.
261 // Since we are returning ERROR, WebRTC will not be interested in the 262 VLOG(0) << "Exceeded maximum pending buffer count, dropping";
262 // remaining buffers, and will provide us with a new keyframe instead. 263 ClearPendingBuffers_Locked();
263 // Better to drop any pending buffers and start afresh to catch up faster. 264 return WEBRTC_VIDEO_CODEC_ERROR;
264 DVLOG(1) << "Exceeded maximum pending buffer count, dropping";
265 ClearPendingBuffers();
266 return WEBRTC_VIDEO_CODEC_ERROR;
267 }
268
269 if (need_to_reset_for_midstream_resize) {
270 base::AutoUnlock auto_unlock(lock_);
271 Release();
272 }
273 return WEBRTC_VIDEO_CODEC_OK;
274 } 265 }
275 266
276 SaveToDecodeBuffers_Locked(inputImage, std::move(shm_buffer), buffer_data); 267 if (need_to_reset_for_midstream_resize) {
277 factories_->GetTaskRunner()->PostTask( 268 base::AutoUnlock auto_unlock(lock_);
278 FROM_HERE, 269 Release();
279 base::Bind(&RTCVideoDecoder::RequestBufferDecode, 270 }
280 weak_factory_.GetWeakPtr())); 271
272 decoder_task_runner_->PostTask(
273 FROM_HERE, base::Bind(&RTCVideoDecoder::RequestBufferDecode,
274 weak_factory_.GetWeakPtr()));
281 return WEBRTC_VIDEO_CODEC_OK; 275 return WEBRTC_VIDEO_CODEC_OK;
282 } 276 }
283 277
284 int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback( 278 int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback(
285 webrtc::DecodedImageCallback* callback) { 279 webrtc::DecodedImageCallback* callback) {
286 DVLOG(2) << "RegisterDecodeCompleteCallback"; 280 VLOG(0) << __func__;
287 DCHECK(callback); 281 DCHECK(callback);
282
283 // This is so terrible. Lock here when setting so that we can use this
284 // callback on the decoder thread. TODO(slan): See if we can avoid this.
288 base::AutoLock auto_lock(lock_); 285 base::AutoLock auto_lock(lock_);
289 decode_complete_callback_ = callback; 286 decode_complete_callback_ = callback;
290 return WEBRTC_VIDEO_CODEC_OK; 287 return WEBRTC_VIDEO_CODEC_OK;
291 } 288 }
292 289
293 int32_t RTCVideoDecoder::Release() { 290 int32_t RTCVideoDecoder::Release() {
294 DVLOG(2) << "Release"; 291 VLOG(0) << __func__;
295 // Do not destroy VDA because WebRTC can call InitDecode and start decoding 292 // Do not destroy VDA because WebRTC can call InitDecode and start decoding
296 // again. 293 // again.
297 base::AutoLock auto_lock(lock_); 294 base::AutoLock auto_lock(lock_);
298 if (state_ == UNINITIALIZED) { 295 if (state_ == UNINITIALIZED) {
299 LOG(ERROR) << "Decoder not initialized."; 296 LOG(ERROR) << "Decoder not initialized.";
300 return WEBRTC_VIDEO_CODEC_UNINITIALIZED; 297 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
301 } 298 }
302 if (next_bitstream_buffer_id_ != 0) 299
303 reset_bitstream_buffer_id_ = next_bitstream_buffer_id_ - 1; 300 //
301 if (next_decoder_buffer_id_ != 0)
302 reset_decoder_buffer_id_ = next_decoder_buffer_id_ - 1;
304 else 303 else
305 reset_bitstream_buffer_id_ = ID_LAST; 304 reset_decoder_buffer_id_ = ID_LAST;
306 // If VDA is already resetting, no need to request the reset again. 305 // If VDA is already resetting, no need to request the reset again.
307 if (state_ != RESETTING) { 306 if (state_ != RESETTING) {
308 state_ = RESETTING; 307 state_ = RESETTING;
309 factories_->GetTaskRunner()->PostTask( 308 decoder_task_runner_->PostTask(
310 FROM_HERE, 309 FROM_HERE, base::Bind(&RTCVideoDecoder::ResetOnDecoderThread,
311 base::Bind(&RTCVideoDecoder::ResetInternal, 310 weak_factory_.GetWeakPtr()));
312 weak_factory_.GetWeakPtr()));
313 } 311 }
314 return WEBRTC_VIDEO_CODEC_OK; 312 return WEBRTC_VIDEO_CODEC_OK;
315 } 313 }
316 314
317 void RTCVideoDecoder::ProvidePictureBuffers(uint32_t count, 315 void RTCVideoDecoder::OnResetDone() {
318 media::VideoPixelFormat format, 316 VLOG(0) << __func__;
319 uint32_t textures_per_buffer, 317 DCheckDecoderTaskRunnerIsCurrent();
320 const gfx::Size& size,
321 uint32_t texture_target) {
322 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
323 DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target;
324 DCHECK_EQ(1u, textures_per_buffer);
325
326 if (!vda_)
327 return;
328
329 std::vector<uint32_t> texture_ids;
330 std::vector<gpu::Mailbox> texture_mailboxes;
331 decoder_texture_target_ = texture_target;
332
333 if (format == media::PIXEL_FORMAT_UNKNOWN)
334 format = media::PIXEL_FORMAT_ARGB;
335
336 if ((pixel_format_ != media::PIXEL_FORMAT_UNKNOWN) &&
337 (format != pixel_format_)) {
338 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
339 return;
340 }
341
342 pixel_format_ = format;
343 if (!factories_->CreateTextures(count,
344 size,
345 &texture_ids,
346 &texture_mailboxes,
347 decoder_texture_target_)) {
348 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
349 return;
350 }
351 DCHECK_EQ(count, texture_ids.size());
352 DCHECK_EQ(count, texture_mailboxes.size());
353
354 std::vector<media::PictureBuffer> picture_buffers;
355 for (size_t i = 0; i < texture_ids.size(); ++i) {
356 media::PictureBuffer::TextureIds ids;
357 ids.push_back(texture_ids[i]);
358 std::vector<gpu::Mailbox> mailboxes;
359 mailboxes.push_back(texture_mailboxes[i]);
360
361 picture_buffers.push_back(
362 media::PictureBuffer(next_picture_buffer_id_++, size, ids, mailboxes));
363 bool inserted = assigned_picture_buffers_.insert(std::make_pair(
364 picture_buffers.back().id(), picture_buffers.back())).second;
365 DCHECK(inserted);
366 }
367 vda_->AssignPictureBuffers(picture_buffers);
368 }
369
370 void RTCVideoDecoder::DismissPictureBuffer(int32_t id) {
371 DVLOG(3) << "DismissPictureBuffer. id=" << id;
372 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
373
374 std::map<int32_t, media::PictureBuffer>::iterator it =
375 assigned_picture_buffers_.find(id);
376 if (it == assigned_picture_buffers_.end()) {
377 NOTREACHED() << "Missing picture buffer: " << id;
378 return;
379 }
380
381 media::PictureBuffer buffer_to_dismiss = it->second;
382 assigned_picture_buffers_.erase(it);
383
384 if (!picture_buffers_at_display_.count(id)) {
385 // We can delete the texture immediately as it's not being displayed.
386 factories_->DeleteTexture(buffer_to_dismiss.texture_ids()[0]);
387 return;
388 }
389 // Not destroying a texture in display in |picture_buffers_at_display_|.
390 // Postpone deletion until after it's returned to us.
391 }
392
393 void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
394 DVLOG(3) << "PictureReady";
395 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
396
397 std::map<int32_t, media::PictureBuffer>::iterator it =
398 assigned_picture_buffers_.find(picture.picture_buffer_id());
399 if (it == assigned_picture_buffers_.end()) {
400 NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id();
401 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
402 return;
403 }
404
405 uint32_t timestamp = 0;
406 gfx::Rect visible_rect;
407 GetBufferData(picture.bitstream_buffer_id(), &timestamp, &visible_rect);
408 if (!picture.visible_rect().IsEmpty())
409 visible_rect = picture.visible_rect();
410
411 const media::PictureBuffer& pb = it->second;
412 if (visible_rect.IsEmpty() || !gfx::Rect(pb.size()).Contains(visible_rect)) {
413 LOG(ERROR) << "Invalid picture size: " << visible_rect.ToString()
414 << " should fit in " << pb.size().ToString();
415 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
416 return;
417 }
418
419 scoped_refptr<media::VideoFrame> frame =
420 CreateVideoFrame(picture, pb, timestamp, visible_rect, pixel_format_);
421 if (!frame) {
422 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
423 return;
424 }
425 bool inserted = picture_buffers_at_display_
426 .insert(std::make_pair(picture.picture_buffer_id(),
427 pb.texture_ids()[0]))
428 .second;
429 DCHECK(inserted);
430
431 // Create a WebRTC video frame.
432 webrtc::VideoFrame decoded_image(
433 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame), timestamp, 0,
434 webrtc::kVideoRotation_0);
435
436 // Invoke decode callback. WebRTC expects no callback after Release.
437 {
438 base::AutoLock auto_lock(lock_);
439 DCHECK(decode_complete_callback_);
440 if (IsBufferAfterReset(picture.bitstream_buffer_id(),
441 reset_bitstream_buffer_id_)) {
442 decode_complete_callback_->Decoded(decoded_image);
443 }
444 // Reset error counter as we successfully decoded a frame.
445 vda_error_counter_ = 0;
446 }
447 }
448
449 scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame(
450 const media::Picture& picture,
451 const media::PictureBuffer& pb,
452 uint32_t timestamp,
453 const gfx::Rect& visible_rect,
454 media::VideoPixelFormat pixel_format) {
455 DCHECK(decoder_texture_target_);
456 // Convert timestamp from 90KHz to ms.
457 base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue(
458 base::checked_cast<uint64_t>(timestamp) * 1000 / 90);
459 // TODO(mcasas): The incoming data may actually be in a YUV format, but may be
460 // labelled as ARGB. This may or may not be reported by VDA, depending on
461 // whether it provides an implementation of VDA::GetOutputFormat().
462 // This prevents the compositor from messing with it, since the underlying
463 // platform can handle the former format natively. Make sure the
464 // correct format is used and everyone down the line understands it.
465 gpu::MailboxHolder holders[media::VideoFrame::kMaxPlanes] = {
466 gpu::MailboxHolder(pb.texture_mailbox(0), gpu::SyncToken(),
467 decoder_texture_target_)};
468 scoped_refptr<media::VideoFrame> frame =
469 media::VideoFrame::WrapNativeTextures(
470 pixel_format, holders,
471 media::BindToCurrentLoop(base::Bind(
472 &RTCVideoDecoder::ReleaseMailbox, weak_factory_.GetWeakPtr(),
473 factories_, picture.picture_buffer_id(), pb.texture_ids()[0])),
474 pb.size(), visible_rect, visible_rect.size(), timestamp_ms);
475 if (frame && picture.allow_overlay()) {
476 frame->metadata()->SetBoolean(media::VideoFrameMetadata::ALLOW_OVERLAY,
477 true);
478 }
479 return frame;
480 }
481
482 void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32_t id) {
483 DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id;
484 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
485
486 auto it = bitstream_buffers_in_decoder_.find(id);
487 if (it == bitstream_buffers_in_decoder_.end()) {
488 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
489 NOTREACHED() << "Missing bitstream buffer: " << id;
490 return;
491 }
492
493 {
494 base::AutoLock auto_lock(lock_);
495 PutSHM_Locked(std::move(it->second));
496 }
497 bitstream_buffers_in_decoder_.erase(it);
498
499 RequestBufferDecode();
500 }
501
502 void RTCVideoDecoder::NotifyFlushDone() {
503 DVLOG(3) << "NotifyFlushDone";
504 NOTREACHED() << "Unexpected flush done notification.";
505 }
506
507 void RTCVideoDecoder::NotifyResetDone() {
508 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
509 DVLOG(3) << "NotifyResetDone";
510
511 if (!vda_)
512 return;
513
514 input_buffer_data_.clear();
515 { 318 {
516 base::AutoLock auto_lock(lock_); 319 base::AutoLock auto_lock(lock_);
517 state_ = INITIALIZED; 320 state_ = INITIALIZED;
518 } 321 }
519 // Send the pending buffers for decoding. 322 // Send the pending buffers for decoding.
520 RequestBufferDecode(); 323 RequestBufferDecode();
521 } 324 }
522 325
523 void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) { 326 // DEBUG_NOTE(slan): This function captures the WebRTC-specific state of
524 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent(); 327 // NotifyError() and DestroyVDA() from the old implementation. All VDA/buffer
525 if (!vda_) 328 // management is handled by GpuVideoDecoder.
329 void RTCVideoDecoder::OnBufferDecoded(int32_t buffer_decode_id,
330 bool is_eos,
331 media::DecodeStatus status) {
332 VLOG(0) << __func__;
333 DCheckDecoderTaskRunnerIsCurrent();
334
335 // This buffer is no longer in flight. Remove it from the in-flight buffers.
336 bool erased = buffers_in_decoder_.erase(buffer_decode_id);
337 DCHECK(erased);
338
339 // DEBUG_NOTE(slan): Not sure if we need to do any special handling here or
340 // not. It should be sufficient to simply kick off another Decode, to emulate
341 // functionality from NotifyEndOfStream.
342 if (is_eos) {
343 // TODO(slan): Anything to do here?
344 }
345
346 // This is only called when |decoder_| is torn down while buffers are in
347 // flight. This probably indicates that something has gone wrong, so let's
348 // do NOTREACHED() now and handle this later.
349 if (status == media::DecodeStatus::ABORTED) {
350 NOTREACHED();
351 } else if (status == media::DecodeStatus::OK) {
352 RequestBufferDecode();
526 return; 353 return;
354 }
527 355
528 LOG(ERROR) << "VDA Error:" << error; 356 // If we hit here, |decoder_| is bubbling up an error from the remote decoder.
529 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderError", error, 357 DCHECK(status == media::DecodeStatus::DECODE_ERROR);
358
359 // DEBUG_NOTE(slan): Functionality from old NotifyError().
360 // This could be problematic as WebRTC relies on a custom UMA stat. For now
361 // return a random error.
362 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderError", -1,
530 media::VideoDecodeAccelerator::ERROR_MAX + 1); 363 media::VideoDecodeAccelerator::ERROR_MAX + 1);
531 DestroyVDA();
532 364
533 base::AutoLock auto_lock(lock_); 365 // DEBUG_NOTE(slan): Emulating functionality from old DestroyVDA().
534 state_ = DECODE_ERROR; 366 // Re-enqueue buffers in flight so we can attempt to decode them again. Insert
535 ++vda_error_counter_; 367 // them back into the queue in their original order.
368 std::map<int32_t, scoped_refptr<media::DecoderBuffer>> sorted(
369 buffers_in_decoder_.begin(), buffers_in_decoder_.end());
370 for (auto rit = sorted.rbegin(); rit != sorted.rend(); ++rit)
371 pending_buffers_.push_front(std::make_pair(rit->first, rit->second));
372
373 // DEBUG_NOTE(slan): Functionality from old NotifyError().
374 {
375 base::AutoLock auto_lock(lock_);
376 state_ = DECODE_ERROR;
377 ++decoder_error_counter_;
378 }
536 } 379 }
537 380
538 void RTCVideoDecoder::RequestBufferDecode() { 381 void RTCVideoDecoder::RequestBufferDecode() {
539 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent(); 382 VLOG(0) << __func__;
540 if (!vda_) 383 DCheckDecoderTaskRunnerIsCurrent();
541 return;
542 384
543 MovePendingBuffersToDecodeBuffers(); 385 // If there are buffers pending, and less than the maximum possible number of
544 386 // decode requests are in flight, push more into the decoder.
545 while (CanMoreDecodeWorkBeDone()) { 387 while (!pending_buffers_.empty() && CanMoreDecodeWorkBeDone()) {
546 // Get a buffer and data from the queue. 388 int32_t decoder_buffer_id = pending_buffers_.front().first;
547 std::unique_ptr<base::SharedMemory> shm_buffer; 389 scoped_refptr<media::DecoderBuffer> decoder_buffer =
548 BufferData buffer_data; 390 pending_buffers_.front().second;
549 { 391 {
392 // Do not request decode if decoder_ is resetting.
550 base::AutoLock auto_lock(lock_); 393 base::AutoLock auto_lock(lock_);
551 // Do not request decode if VDA is resetting. 394 if (state_ == RESETTING)
552 if (decode_buffers_.empty() || state_ == RESETTING)
553 return; 395 return;
554 shm_buffer = std::move(decode_buffers_.front().first);
555 buffer_data = decode_buffers_.front().second;
556 decode_buffers_.pop_front();
557 // Drop the buffers before Release is called.
558 if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
559 reset_bitstream_buffer_id_)) {
560 PutSHM_Locked(std::move(shm_buffer));
561 continue;
562 }
563 } 396 }
564 397
565 // Create a BitstreamBuffer and send to VDA to decode. 398 // Remove the buffer from the queue.
566 media::BitstreamBuffer bitstream_buffer( 399 pending_buffers_.pop_front();
567 buffer_data.bitstream_buffer_id, shm_buffer->handle(), buffer_data.size, 400
568 0, base::TimeDelta::FromInternalValue(buffer_data.timestamp)); 401 // If the buffer is from before the last call to Release(), drop it on
569 const bool inserted = bitstream_buffers_in_decoder_ 402 // the floor and keep going.
570 .insert(std::make_pair(bitstream_buffer.id(), 403 if (!IsBufferAfterReset(decoder_buffer_id, reset_decoder_buffer_id_)) {
571 std::move(shm_buffer))) 404 continue;
572 .second; 405 }
573 DCHECK(inserted) << "bitstream_buffer_id " << bitstream_buffer.id() 406
574 << " existed already in bitstream_buffers_in_decoder_"; 407 // Push the buffer to the decoder.
575 RecordBufferData(buffer_data); 408 DCHECK(!base::ContainsKey(buffers_in_decoder_, decoder_buffer_id));
576 vda_->Decode(bitstream_buffer); 409 buffers_in_decoder_[decoder_buffer_id] = decoder_buffer;
410 decoder_->Decode(
411 decoder_buffer,
412 base::Bind(&RTCVideoDecoder::OnBufferDecoded, base::Unretained(this),
413 decoder_buffer_id, false /* is_eos */));
577 } 414 }
578 } 415 }
579 416
580 bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() { 417 bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() {
581 return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes; 418 VLOG(0) << __func__;
419 return (static_cast<int>(buffers_in_decoder_.size()) <
420 decoder_->GetMaxDecodeRequests());
582 } 421 }
583 422
584 bool RTCVideoDecoder::IsBufferAfterReset(int32_t id_buffer, int32_t id_reset) { 423 bool RTCVideoDecoder::IsBufferAfterReset(int32_t id_buffer, int32_t id_reset) {
424 VLOG(0) << __func__;
585 if (id_reset == ID_INVALID) 425 if (id_reset == ID_INVALID)
586 return true; 426 return true;
587 int32_t diff = id_buffer - id_reset; 427 int32_t diff = id_buffer - id_reset;
588 if (diff <= 0) 428 if (diff <= 0)
589 diff += ID_LAST + 1; 429 diff += ID_LAST + 1;
590 return diff < ID_HALF; 430 return diff < ID_HALF;
591 } 431 }
592 432
593 bool RTCVideoDecoder::IsFirstBufferAfterReset(int32_t id_buffer, 433 bool RTCVideoDecoder::IsFirstBufferAfterReset(int32_t id_buffer,
594 int32_t id_reset) { 434 int32_t id_reset) {
435 VLOG(0) << __func__;
595 if (id_reset == ID_INVALID) 436 if (id_reset == ID_INVALID)
596 return id_buffer == 0; 437 return id_buffer == 0;
597 return id_buffer == ((id_reset + 1) & ID_LAST); 438 return id_buffer == ((id_reset + 1) & ID_LAST);
598 } 439 }
599 440
600 void RTCVideoDecoder::SaveToDecodeBuffers_Locked( 441 bool RTCVideoDecoder::SaveToPendingBuffers_Locked(
601 const webrtc::EncodedImage& input_image, 442 int32_t decoder_buffer_id,
602 std::unique_ptr<base::SharedMemory> shm_buffer, 443 const scoped_refptr<media::DecoderBuffer>& decoder_buffer) {
603 const BufferData& buffer_data) { 444 VLOG(0) << "SaveToPendingBuffers_Locked"
604 memcpy(shm_buffer->memory(), input_image._buffer, input_image._length); 445 << ". pending_buffers size=" << pending_buffers_.size();
446 lock_.AssertAcquired();
605 447
606 // Store the buffer and the metadata to the queue.
607 decode_buffers_.emplace_back(std::move(shm_buffer), buffer_data);
608 }
609
610 bool RTCVideoDecoder::SaveToPendingBuffers_Locked(
611 const webrtc::EncodedImage& input_image,
612 const BufferData& buffer_data) {
613 DVLOG(2) << "SaveToPendingBuffers_Locked"
614 << ". pending_buffers size=" << pending_buffers_.size()
615 << ". decode_buffers_ size=" << decode_buffers_.size()
616 << ". available_shm size=" << available_shm_segments_.size();
617 // Queued too many buffers. Something has gone wrong. 448 // Queued too many buffers. Something has gone wrong.
618 if (pending_buffers_.size() >= kMaxNumOfPendingBuffers) { 449 if (pending_buffers_.size() >= kMaxNumOfPendingBuffers) {
619 LOG(WARNING) << "Too many pending buffers!"; 450 LOG(WARNING) << "Too many pending buffers!";
620 return false; 451 return false;
621 } 452 }
622 453
623 // Clone the input image and save it to the queue. 454 // Enqueue the buffer, so that it may be consumed by the decoder.
624 uint8_t* buffer = new uint8_t[input_image._length]; 455 // TODO(slan): Use std::move all the way down to cut down on atomic ops.
625 // TODO(wuchengli): avoid memcpy. Extend webrtc::VideoDecoder::Decode() 456 pending_buffers_.push_back(std::make_pair(decoder_buffer_id, decoder_buffer));
626 // interface to take a non-const ptr to the frame and add a method to the
627 // frame that will swap buffers with another.
628 memcpy(buffer, input_image._buffer, input_image._length);
629 webrtc::EncodedImage encoded_image(
630 buffer, input_image._length, input_image._length);
631 std::pair<webrtc::EncodedImage, BufferData> buffer_pair =
632 std::make_pair(encoded_image, buffer_data);
633
634 pending_buffers_.push_back(buffer_pair);
635 return true; 457 return true;
636 } 458 }
637 459
638 void RTCVideoDecoder::MovePendingBuffersToDecodeBuffers() { 460 void RTCVideoDecoder::ResetOnDecoderThread() {
461 VLOG(0) << __func__;
462 DCheckDecoderTaskRunnerIsCurrent();
463 DCHECK(decoder_);
464 // The decoder is resetting.
639 base::AutoLock auto_lock(lock_); 465 base::AutoLock auto_lock(lock_);
640 while (pending_buffers_.size() > 0) { 466 state_ = RESETTING;
641 // Get a pending buffer from the queue. 467 decoder_->Reset(
642 const webrtc::EncodedImage& input_image = pending_buffers_.front().first; 468 base::Bind(&RTCVideoDecoder::OnResetDone, base::Unretained(this)));
643 const BufferData& buffer_data = pending_buffers_.front().second;
644
645 // Drop the frame if it comes before Release.
646 if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
647 reset_bitstream_buffer_id_)) {
648 delete[] input_image._buffer;
649 pending_buffers_.pop_front();
650 continue;
651 }
652 // Get shared memory and save it to decode buffers.
653 std::unique_ptr<base::SharedMemory> shm_buffer =
654 GetSHM_Locked(input_image._length);
655 if (!shm_buffer)
656 return;
657 SaveToDecodeBuffers_Locked(input_image, std::move(shm_buffer), buffer_data);
658 delete[] input_image._buffer;
659 pending_buffers_.pop_front();
660 }
661 }
662
663 void RTCVideoDecoder::ResetInternal() {
664 DVLOG(2) << __func__;
665 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
666
667 if (vda_) {
668 vda_->Reset();
669 } else {
670 CreateVDA(vda_codec_profile_, nullptr);
671 if (vda_) {
672 base::AutoLock auto_lock(lock_);
673 state_ = INITIALIZED;
674 }
675 }
676 }
677
678 // static
679 void RTCVideoDecoder::ReleaseMailbox(
680 base::WeakPtr<RTCVideoDecoder> decoder,
681 media::GpuVideoAcceleratorFactories* factories,
682 int64_t picture_buffer_id,
683 uint32_t texture_id,
684 const gpu::SyncToken& release_sync_token) {
685 DCHECK(factories->GetTaskRunner()->BelongsToCurrentThread());
686 factories->WaitSyncToken(release_sync_token);
687
688 if (decoder) {
689 decoder->ReusePictureBuffer(picture_buffer_id);
690 return;
691 }
692 // It's the last chance to delete the texture after display,
693 // because RTCVideoDecoder was destructed.
694 factories->DeleteTexture(texture_id);
695 }
696
697 void RTCVideoDecoder::ReusePictureBuffer(int64_t picture_buffer_id) {
698 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
699 DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id;
700
701 DCHECK(!picture_buffers_at_display_.empty());
702 PictureBufferTextureMap::iterator display_iterator =
703 picture_buffers_at_display_.find(picture_buffer_id);
704 uint32_t texture_id = display_iterator->second;
705 DCHECK(display_iterator != picture_buffers_at_display_.end());
706 picture_buffers_at_display_.erase(display_iterator);
707
708 if (!assigned_picture_buffers_.count(picture_buffer_id)) {
709 // This picture was dismissed while in display, so we postponed deletion.
710 factories_->DeleteTexture(texture_id);
711 return;
712 }
713
714 // DestroyVDA() might already have been called.
715 if (vda_)
716 vda_->ReusePictureBuffer(picture_buffer_id);
717 } 469 }
718 470
719 bool RTCVideoDecoder::IsProfileSupported(media::VideoCodecProfile profile) { 471 bool RTCVideoDecoder::IsProfileSupported(media::VideoCodecProfile profile) {
720 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent(); 472 VLOG(0) << __func__;
721 media::VideoDecodeAccelerator::Capabilities capabilities = 473 DCheckDecoderTaskRunnerIsCurrent();
722 factories_->GetVideoDecodeAcceleratorCapabilities();
723 474
724 for (const auto& supported_profile : capabilities.supported_profiles) { 475 // TODO(slan): Figure out how to do capabilities.
725 if (profile == supported_profile.profile) { 476 min_resolution_ = gfx::Size(0, 0);
726 min_resolution_ = supported_profile.min_resolution; 477 max_resolution_ = gfx::Size(4000, 4000);
727 max_resolution_ = supported_profile.max_resolution; 478 return true;
728 return true;
729 }
730 }
731
732 return false;
733 } 479 }
734 480
735 void RTCVideoDecoder::CreateVDA(media::VideoCodecProfile profile, 481 void RTCVideoDecoder::InitializeDecoder(media::VideoCodecProfile profile,
736 base::WaitableEvent* waiter) { 482 media::VideoCodec codec,
737 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent(); 483 base::WaitableEvent* waiter) {
484 VLOG(0) << __func__;
485 DCheckDecoderTaskRunnerIsCurrent();
486
487 decoder_ = create_video_decoder_cb_.Run();
738 488
739 if (!IsProfileSupported(profile)) { 489 if (!IsProfileSupported(profile)) {
740 DVLOG(1) << "Unsupported profile " << profile; 490 DVLOG(1) << "Unsupported profile " << profile;
741 } else { 491 } else {
742 vda_ = factories_->CreateVideoDecodeAccelerator(); 492 // TODO(slan): Pass in a media::VideoDecoderConfig to this class. The only
743 493 // things that seem to matter here are populated.
744 media::VideoDecodeAccelerator::Config config(profile); 494 media::VideoDecoderConfig config(
745 if (vda_ && !vda_->Initialize(config, this)) 495 codec, profile, media::PIXEL_FORMAT_ARGB, /* Not used */
746 vda_.release()->Destroy(); 496 media::COLOR_SPACE_UNSPECIFIED, /* not used */
497 gfx::Size(320, 240), /* coded_size - default value in VDA::Client */
498 gfx::Rect(0, 0, 320, 240), /* visible_rect - Not used */
499 gfx::Size(320, 240), /* natural_size - figure this out. */
500 std::vector<uint8_t>(), /* extra_data - not used */
501 media::EncryptionScheme());
502 DCHECK(config.IsValidConfig()) << config.AsHumanReadableString();
503 decoder_->Initialize(
504 config, true /* low_delay */, nullptr /* cdm_context */,
505 base::Bind(&RTCVideoDecoder::OnDecoderInitialized,
506 base::Unretained(this), waiter),
507 base::Bind(&RTCVideoDecoder::OnFrameReady, base::Unretained(this)));
747 vda_codec_profile_ = profile; 508 vda_codec_profile_ = profile;
748 } 509 }
749
750 if (waiter)
751 waiter->Signal();
752 } 510 }
753 511
754 void RTCVideoDecoder::DestroyTextures() { 512 void RTCVideoDecoder::OnDecoderInitialized(base::WaitableEvent* waiter,
755 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent(); 513 bool success) {
514 VLOG(0) << __func__;
515 DCheckDecoderTaskRunnerIsCurrent();
756 516
757 // Not destroying PictureBuffers in |picture_buffers_at_display_| yet, since 517 // TODO(slan): Figure out how to handle this case better. For now, let's get
758 // their textures may still be in use by the user of this RTCVideoDecoder. 518 // it working.
759 for (const auto& picture_buffer_at_display : picture_buffers_at_display_) 519 if (!success) {
760 assigned_picture_buffers_.erase(picture_buffer_at_display.first); 520 base::AutoLock lock(lock_);
521 state_ = DECODE_ERROR;
522 }
761 523
762 for (const auto& assigned_picture_buffer : assigned_picture_buffers_) 524 // // TODO(slan): Figure out how to handle this case better. For now, let's
763 factories_->DeleteTexture(assigned_picture_buffer.second.texture_ids()[0]); 525 // get
526 // // it working.
527 // CHECK(success);
764 528
765 assigned_picture_buffers_.clear(); 529 // // Update the internal state of the decoder.
530 // {
531 // base::AutoLock lock(lock_);
532 // state_ = INITIALIZED;
533 // }
534
535 // // Release the WebRTC thread, indicating we are ready to start decoding.
536 // if (waiter)
537 // waiter->Signal();
766 } 538 }
767 539
768 void RTCVideoDecoder::DestroyVDA() { 540 void RTCVideoDecoder::OnFrameReady(
769 DVLOG(2) << "DestroyVDA"; 541 const scoped_refptr<media::VideoFrame>& frame) {
770 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent(); 542 VLOG(0) << __func__;
771 if (vda_) 543 DCheckDecoderTaskRunnerIsCurrent();
772 vda_.release()->Destroy();
773 DestroyTextures();
774 544
775 base::AutoLock auto_lock(lock_); 545 // DEBUG - Comment this in to make the stream all-black.
546 // frame = media::VideoFrame::CreateBlackFrame(frame->natural_size());
776 547
777 // Put the buffers back in case we restart the decoder. 548 // Create a WebRTC video frame.
778 for (auto& buffer : bitstream_buffers_in_decoder_) 549 webrtc::VideoFrame decoded_image(
779 PutSHM_Locked(std::move(buffer.second)); 550 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame),
780 bitstream_buffers_in_decoder_.clear(); 551 frame->timestamp().ToInternalValue(), 0, webrtc::kVideoRotation_0);
781 552
782 state_ = UNINITIALIZED; 553 // DEBUG_NOTE(slan): In the original code from PictureReady(), the code inside
783 } 554 // the lock looks something like this:
555 //
556 // // Invoke decode callback. WebRTC expects no callback after Release.
557 // if (IsBufferAfterReset(picture.bitstream_buffer_id(),
558 // reset_decoder_buffer_id_)) {
559 // decode_complete_callback_->Decoded(decoded_image);
560 // }
561 // decoder_error_counter_ = 0;
562 //
563 // This code makes sure that all frames that had been sent to the decoder
564 // service, but had not returned yet, are not called back to WebRTC. This
565 // *should* be handled by GpuVideoDecoder::DeliverFrame, which drops any
566 // frames that are returned while a VDA::Reset() call is pending. The VDA
567 // should also flush its pending frames when it recieves the Reset()
568 // command.
784 569
785 std::unique_ptr<base::SharedMemory> RTCVideoDecoder::GetSHM_Locked( 570 // Lock and pass the frame up to the WebRTC client class.
786 size_t min_size) { 571 {
787 // Reuse a SHM if possible. 572 base::AutoLock auto_lock(lock_);
788 if (!available_shm_segments_.empty() && 573 DCHECK(decode_complete_callback_);
789 available_shm_segments_.back()->mapped_size() >= min_size) { 574 decode_complete_callback_->Decoded(decoded_image);
790 std::unique_ptr<base::SharedMemory> buffer = 575
791 std::move(available_shm_segments_.back()); 576 // Reset error counter as we successfully decoded a frame.
792 available_shm_segments_.pop_back(); 577 decoder_error_counter_ = 0;
793 return buffer;
794 } 578 }
795
796 if (available_shm_segments_.size() != num_shm_buffers_) {
797 // Either available_shm_segments_ is empty (and we already have some SHM
798 // buffers allocated), or the size of available segments is not large
799 // enough. In the former case we need to wait for buffers to be returned,
800 // in the latter we need to wait for all buffers to be returned to drop
801 // them and reallocate with a new size.
802 return NULL;
803 }
804
805 if (num_shm_buffers_ != 0) {
806 available_shm_segments_.clear();
807 num_shm_buffers_ = 0;
808 }
809
810 // Create twice as large buffers as required, to avoid frequent reallocation.
811 factories_->GetTaskRunner()->PostTask(
812 FROM_HERE,
813 base::Bind(&RTCVideoDecoder::CreateSHM, weak_factory_.GetWeakPtr(),
814 kNumSharedMemorySegments, min_size * 2));
815
816 // We'll be called again after the shared memory is created.
817 return NULL;
818 }
819
820 void RTCVideoDecoder::PutSHM_Locked(
821 std::unique_ptr<base::SharedMemory> shm_buffer) {
822 lock_.AssertAcquired();
823 available_shm_segments_.push_back(std::move(shm_buffer));
824 }
825
826 void RTCVideoDecoder::CreateSHM(size_t count, size_t size) {
827 DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
828 DVLOG(2) << "CreateSHM. count=" << count << ", size=" << size;
829
830 for (size_t i = 0; i < count; i++) {
831 std::unique_ptr<base::SharedMemory> shm =
832 factories_->CreateSharedMemory(size);
833 if (!shm) {
834 LOG(ERROR) << "Failed allocating shared memory of size=" << size;
835 NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
836 return;
837 }
838
839 base::AutoLock auto_lock(lock_);
840 PutSHM_Locked(std::move(shm));
841 ++num_shm_buffers_;
842 }
843
844 // Kick off the decoding.
845 RequestBufferDecode();
846 }
847
848 void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) {
849 input_buffer_data_.push_front(buffer_data);
850 // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but
851 // that's too small for some pathological B-frame test videos. The cost of
852 // using too-high a value is low (192 bits per extra slot).
853 static const size_t kMaxInputBufferDataSize = 128;
854 // Pop from the back of the list, because that's the oldest and least likely
855 // to be useful in the future data.
856 if (input_buffer_data_.size() > kMaxInputBufferDataSize)
857 input_buffer_data_.pop_back();
858 }
859
860 void RTCVideoDecoder::GetBufferData(int32_t bitstream_buffer_id,
861 uint32_t* timestamp,
862 gfx::Rect* visible_rect) {
863 for (const auto& buffer_data : input_buffer_data_) {
864 if (buffer_data.bitstream_buffer_id != bitstream_buffer_id)
865 continue;
866 *timestamp = buffer_data.timestamp;
867 *visible_rect = buffer_data.visible_rect;
868 return;
869 }
870 NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id;
871 } 579 }
872 580
873 int32_t RTCVideoDecoder::RecordInitDecodeUMA(int32_t status) { 581 int32_t RTCVideoDecoder::RecordInitDecodeUMA(int32_t status) {
582 VLOG(0) << __func__;
874 // Logging boolean is enough to know if HW decoding has been used. Also, 583 // Logging boolean is enough to know if HW decoding has been used. Also,
875 // InitDecode is less likely to return an error so enum is not used here. 584 // InitDecode is less likely to return an error so enum is not used here.
876 bool sample = (status == WEBRTC_VIDEO_CODEC_OK) ? true : false; 585 bool sample = (status == WEBRTC_VIDEO_CODEC_OK) ? true : false;
877 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoDecoderInitDecodeSuccess", sample); 586 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoDecoderInitDecodeSuccess", sample);
878 return status; 587 return status;
879 } 588 }
880 589
881 void RTCVideoDecoder::DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent() 590 void RTCVideoDecoder::DCheckDecoderTaskRunnerIsCurrent() const {
882 const { 591 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
883 DCHECK(factories_->GetTaskRunner()->BelongsToCurrentThread());
884 } 592 }
885 593
886 void RTCVideoDecoder::ClearPendingBuffers() { 594 void RTCVideoDecoder::ClearPendingBuffers_Locked() {
887 // Delete WebRTC input buffers. 595 VLOG(0) << __func__;
888 for (const auto& pending_buffer : pending_buffers_) 596 lock_.AssertAcquired();
889 delete[] pending_buffer.first._buffer;
890 pending_buffers_.clear(); 597 pending_buffers_.clear();
891 } 598 }
892 599
893 } // namespace content 600 } // namespace content