// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/filters/vpx_video_decoder.h"

#include <algorithm>
#include <string>

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/message_loop_proxy.h"
#include "base/string_number_conversions.h"
#include "media/base/bind_to_loop.h"
#include "media/base/decoder_buffer.h"
#include "media/base/demuxer_stream.h"
#include "media/base/media_switches.h"
#include "media/base/pipeline.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"

// Include libvpx header files.
// VPX_CODEC_DISABLE_COMPAT excludes parts of the libvpx API that provide
// backwards compatibility for legacy applications using the library.
#define VPX_CODEC_DISABLE_COMPAT 1
extern "C" {
#include "third_party/libvpx/libvpx.h"
}

28 namespace media { | 31 namespace media { |
29 | 32 |
// Always try to use three threads for video decoding. There is little reason
// not to since current day CPUs tend to be multi-core and we measured
// performance benefits on older machines such as P4s with hyperthreading.
static const int kDecodeThreads = 2;

// Upper bound applied to any --video-threads command line override.
static const int kMaxDecodeThreads = 16;
41 | 38 |
42 // Returns the number of threads given the FFmpeg CodecID. Also inspects the | 39 // Returns the number of threads. |
43 // command line for a valid --video-threads flag. | 40 static int GetThreadCount() { |
44 static int GetThreadCount(CodecID codec_id) { | 41 // TODO(scherkus): De-duplicate this function and the one used by |
| 42 // FFmpegVideoDecoder. |
| 43 |
45 // Refer to http://crbug.com/93932 for tsan suppressions on decoding. | 44 // Refer to http://crbug.com/93932 for tsan suppressions on decoding. |
46 int decode_threads = kDecodeThreads; | 45 int decode_threads = kDecodeThreads; |
47 | 46 |
48 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); | 47 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); |
49 std::string threads(cmd_line->GetSwitchValueASCII(switches::kVideoThreads)); | 48 std::string threads(cmd_line->GetSwitchValueASCII(switches::kVideoThreads)); |
50 if (threads.empty() || !base::StringToInt(threads, &decode_threads)) | 49 if (threads.empty() || !base::StringToInt(threads, &decode_threads)) |
51 return decode_threads; | 50 return decode_threads; |
52 | 51 |
53 decode_threads = std::max(decode_threads, 0); | 52 decode_threads = std::max(decode_threads, 0); |
54 decode_threads = std::min(decode_threads, kMaxDecodeThreads); | 53 decode_threads = std::min(decode_threads, kMaxDecodeThreads); |
55 return decode_threads; | 54 return decode_threads; |
56 } | 55 } |
57 | 56 |
58 FFmpegVideoDecoder::FFmpegVideoDecoder( | 57 VpxVideoDecoder::VpxVideoDecoder( |
59 const scoped_refptr<base::MessageLoopProxy>& message_loop) | 58 const scoped_refptr<base::MessageLoopProxy>& message_loop) |
60 : message_loop_(message_loop), | 59 : message_loop_(message_loop), |
61 state_(kUninitialized), | 60 state_(kUninitialized), |
62 codec_context_(NULL), | 61 vpx_codec_(NULL) { |
63 av_frame_(NULL) { | |
64 } | 62 } |
65 | 63 |
66 int FFmpegVideoDecoder::GetVideoBuffer(AVCodecContext* codec_context, | 64 VpxVideoDecoder::~VpxVideoDecoder() { |
67 AVFrame* frame) { | 65 DCHECK_EQ(kUninitialized, state_); |
68 // Don't use |codec_context_| here! With threaded decoding, | 66 CloseDecoder(); |
69 // it will contain unsynchronized width/height/pix_fmt values, | |
70 // whereas |codec_context| contains the current threads's | |
71 // updated width/height/pix_fmt, which can change for adaptive | |
72 // content. | |
73 VideoFrame::Format format = PixelFormatToVideoFormat(codec_context->pix_fmt); | |
74 if (format == VideoFrame::INVALID) | |
75 return AVERROR(EINVAL); | |
76 DCHECK(format == VideoFrame::YV12 || format == VideoFrame::YV16); | |
77 | |
78 gfx::Size size(codec_context->width, codec_context->height); | |
79 int ret; | |
80 if ((ret = av_image_check_size(size.width(), size.height(), 0, NULL)) < 0) | |
81 return ret; | |
82 | |
83 gfx::Size natural_size; | |
84 if (codec_context->sample_aspect_ratio.num > 0) { | |
85 natural_size = GetNaturalSize(size, | |
86 codec_context->sample_aspect_ratio.num, | |
87 codec_context->sample_aspect_ratio.den); | |
88 } else { | |
89 natural_size = demuxer_stream_->video_decoder_config().natural_size(); | |
90 } | |
91 | |
92 if (!VideoFrame::IsValidConfig(format, size, gfx::Rect(size), natural_size)) | |
93 return AVERROR(EINVAL); | |
94 | |
95 scoped_refptr<VideoFrame> video_frame = | |
96 VideoFrame::CreateFrame(format, size, gfx::Rect(size), natural_size, | |
97 kNoTimestamp()); | |
98 | |
99 for (int i = 0; i < 3; i++) { | |
100 frame->base[i] = video_frame->data(i); | |
101 frame->data[i] = video_frame->data(i); | |
102 frame->linesize[i] = video_frame->stride(i); | |
103 } | |
104 | |
105 frame->opaque = NULL; | |
106 video_frame.swap(reinterpret_cast<VideoFrame**>(&frame->opaque)); | |
107 frame->type = FF_BUFFER_TYPE_USER; | |
108 frame->pkt_pts = codec_context->pkt ? codec_context->pkt->pts : | |
109 AV_NOPTS_VALUE; | |
110 frame->width = codec_context->width; | |
111 frame->height = codec_context->height; | |
112 frame->format = codec_context->pix_fmt; | |
113 | |
114 return 0; | |
115 } | 67 } |
116 | 68 |
117 static int GetVideoBufferImpl(AVCodecContext* s, AVFrame* frame) { | 69 void VpxVideoDecoder::Initialize( |
118 FFmpegVideoDecoder* vd = static_cast<FFmpegVideoDecoder*>(s->opaque); | 70 const scoped_refptr<DemuxerStream>& stream, |
119 return vd->GetVideoBuffer(s, frame); | 71 const PipelineStatusCB& status_cb, |
120 } | 72 const StatisticsCB& statistics_cb) { |
121 | |
122 static void ReleaseVideoBufferImpl(AVCodecContext* s, AVFrame* frame) { | |
123 scoped_refptr<VideoFrame> video_frame; | |
124 video_frame.swap(reinterpret_cast<VideoFrame**>(&frame->opaque)); | |
125 | |
126 // The FFmpeg API expects us to zero the data pointers in | |
127 // this callback | |
128 memset(frame->data, 0, sizeof(frame->data)); | |
129 frame->opaque = NULL; | |
130 } | |
131 | |
132 void FFmpegVideoDecoder::Initialize(const scoped_refptr<DemuxerStream>& stream, | |
133 const PipelineStatusCB& status_cb, | |
134 const StatisticsCB& statistics_cb) { | |
135 DCHECK(message_loop_->BelongsToCurrentThread()); | 73 DCHECK(message_loop_->BelongsToCurrentThread()); |
136 PipelineStatusCB initialize_cb = BindToCurrentLoop(status_cb); | |
137 | |
138 FFmpegGlue::InitializeFFmpeg(); | |
139 DCHECK(!demuxer_stream_) << "Already initialized."; | 74 DCHECK(!demuxer_stream_) << "Already initialized."; |
140 | 75 |
141 if (!stream) { | 76 if (!stream) { |
142 initialize_cb.Run(PIPELINE_ERROR_DECODE); | 77 status_cb.Run(PIPELINE_ERROR_DECODE); |
143 return; | 78 return; |
144 } | 79 } |
145 | 80 |
146 demuxer_stream_ = stream; | 81 demuxer_stream_ = stream; |
147 statistics_cb_ = statistics_cb; | 82 statistics_cb_ = statistics_cb; |
148 | 83 |
149 if (!ConfigureDecoder()) { | 84 if (!ConfigureDecoder()) { |
150 initialize_cb.Run(DECODER_ERROR_NOT_SUPPORTED); | 85 status_cb.Run(DECODER_ERROR_NOT_SUPPORTED); |
151 return; | 86 return; |
152 } | 87 } |
153 | 88 |
154 // Success! | 89 // Success! |
155 state_ = kNormal; | 90 state_ = kNormal; |
156 initialize_cb.Run(PIPELINE_OK); | 91 status_cb.Run(PIPELINE_OK); |
157 } | 92 } |
158 | 93 |
159 void FFmpegVideoDecoder::Read(const ReadCB& read_cb) { | 94 bool VpxVideoDecoder::ConfigureDecoder() { |
| 95 const VideoDecoderConfig& config = demuxer_stream_->video_decoder_config(); |
| 96 if (!config.IsValidConfig()) { |
| 97 DLOG(ERROR) << "Invalid video stream config: " |
| 98 << config.AsHumanReadableString(); |
| 99 return false; |
| 100 } |
| 101 |
| 102 if (config.codec() != kCodecVP9) |
| 103 return false; |
| 104 |
| 105 CloseDecoder(); |
| 106 |
| 107 vpx_codec_ = new vpx_codec_ctx(); |
| 108 vpx_codec_dec_cfg_t vpx_config = {0}; |
| 109 vpx_config.w = config.coded_size().width(); |
| 110 vpx_config.h = config.coded_size().height(); |
| 111 vpx_config.threads = GetThreadCount(); |
| 112 |
| 113 vpx_codec_err_t status = vpx_codec_dec_init(vpx_codec_, |
| 114 vpx_codec_vp9_dx(), |
| 115 &vpx_config, |
| 116 0); |
| 117 if (status != VPX_CODEC_OK) { |
| 118 LOG(ERROR) << "vpx_codec_dec_init failed, status=" << status; |
| 119 delete vpx_codec_; |
| 120 vpx_codec_ = NULL; |
| 121 return false; |
| 122 } |
| 123 |
| 124 return true; |
| 125 } |
| 126 |
| 127 void VpxVideoDecoder::CloseDecoder() { |
| 128 if (vpx_codec_) { |
| 129 vpx_codec_destroy(vpx_codec_); |
| 130 delete vpx_codec_; |
| 131 vpx_codec_ = NULL; |
| 132 } |
| 133 } |
| 134 |
| 135 void VpxVideoDecoder::Read(const ReadCB& read_cb) { |
160 DCHECK(message_loop_->BelongsToCurrentThread()); | 136 DCHECK(message_loop_->BelongsToCurrentThread()); |
161 DCHECK(!read_cb.is_null()); | 137 DCHECK(!read_cb.is_null()); |
162 CHECK_NE(state_, kUninitialized); | 138 CHECK_NE(state_, kUninitialized); |
163 CHECK(read_cb_.is_null()) << "Overlapping decodes are not supported."; | 139 CHECK(read_cb_.is_null()) << "Overlapping decodes are not supported."; |
164 read_cb_ = BindToCurrentLoop(read_cb); | 140 read_cb_ = BindToCurrentLoop(read_cb); |
165 | 141 |
166 // Return empty frames if decoding has finished. | 142 // Return empty frames if decoding has finished. |
167 if (state_ == kDecodeFinished) { | 143 if (state_ == kDecodeFinished) { |
168 base::ResetAndReturn(&read_cb_).Run(kOk, VideoFrame::CreateEmptyFrame()); | 144 read_cb.Run(kOk, VideoFrame::CreateEmptyFrame()); |
169 return; | 145 return; |
170 } | 146 } |
171 | 147 |
172 ReadFromDemuxerStream(); | 148 ReadFromDemuxerStream(); |
173 } | 149 } |
174 | 150 |
175 void FFmpegVideoDecoder::Reset(const base::Closure& closure) { | 151 void VpxVideoDecoder::Reset(const base::Closure& closure) { |
176 DCHECK(message_loop_->BelongsToCurrentThread()); | 152 DCHECK(message_loop_->BelongsToCurrentThread()); |
177 DCHECK(reset_cb_.is_null()); | 153 DCHECK(reset_cb_.is_null()); |
178 reset_cb_ = BindToCurrentLoop(closure); | 154 reset_cb_ = BindToCurrentLoop(closure); |
179 | 155 |
180 // Defer the reset if a read is pending. | 156 // Defer the reset if a read is pending. |
181 if (!read_cb_.is_null()) | 157 if (!read_cb_.is_null()) |
182 return; | 158 return; |
183 | 159 |
184 DoReset(); | 160 DoReset(); |
185 } | 161 } |
186 | 162 |
187 void FFmpegVideoDecoder::DoReset() { | 163 void VpxVideoDecoder::Stop(const base::Closure& closure) { |
188 DCHECK(read_cb_.is_null()); | 164 DCHECK(message_loop_->BelongsToCurrentThread()); |
189 | 165 |
190 avcodec_flush_buffers(codec_context_); | 166 if (state_ == kUninitialized) { |
191 state_ = kNormal; | 167 closure.Run(); |
192 base::ResetAndReturn(&reset_cb_).Run(); | |
193 } | |
194 | |
195 void FFmpegVideoDecoder::Stop(const base::Closure& closure) { | |
196 DCHECK(message_loop_->BelongsToCurrentThread()); | |
197 base::ScopedClosureRunner runner(BindToCurrentLoop(closure)); | |
198 | |
199 if (state_ == kUninitialized) | |
200 return; | 168 return; |
| 169 } |
201 | 170 |
202 if (!read_cb_.is_null()) | 171 if (!read_cb_.is_null()) |
203 base::ResetAndReturn(&read_cb_).Run(kOk, NULL); | 172 base::ResetAndReturn(&read_cb_).Run(kOk, NULL); |
204 | 173 |
205 ReleaseFFmpegResources(); | |
206 state_ = kUninitialized; | 174 state_ = kUninitialized; |
| 175 closure.Run(); |
207 } | 176 } |
208 | 177 |
209 FFmpegVideoDecoder::~FFmpegVideoDecoder() { | 178 void VpxVideoDecoder::ReadFromDemuxerStream() { |
210 DCHECK_EQ(kUninitialized, state_); | |
211 DCHECK(!codec_context_); | |
212 DCHECK(!av_frame_); | |
213 } | |
214 | |
215 void FFmpegVideoDecoder::ReadFromDemuxerStream() { | |
216 DCHECK_NE(state_, kUninitialized); | 179 DCHECK_NE(state_, kUninitialized); |
217 DCHECK_NE(state_, kDecodeFinished); | 180 DCHECK_NE(state_, kDecodeFinished); |
218 DCHECK(!read_cb_.is_null()); | 181 DCHECK(!read_cb_.is_null()); |
219 | 182 |
220 demuxer_stream_->Read(base::Bind( | 183 demuxer_stream_->Read(base::Bind( |
221 &FFmpegVideoDecoder::BufferReady, this)); | 184 &VpxVideoDecoder::DoDecryptOrDecodeBuffer, this)); |
222 } | 185 } |
223 | 186 |
224 void FFmpegVideoDecoder::BufferReady( | 187 void VpxVideoDecoder::DoDecryptOrDecodeBuffer( |
225 DemuxerStream::Status status, | 188 DemuxerStream::Status status, |
226 const scoped_refptr<DecoderBuffer>& buffer) { | 189 const scoped_refptr<DecoderBuffer>& buffer) { |
227 DCHECK(message_loop_->BelongsToCurrentThread()); | 190 DCHECK(message_loop_->BelongsToCurrentThread()); |
228 DCHECK_NE(state_, kDecodeFinished); | 191 DCHECK_NE(state_, kDecodeFinished); |
229 DCHECK_EQ(status != DemuxerStream::kOk, !buffer) << status; | 192 DCHECK_EQ(status != DemuxerStream::kOk, !buffer) << status; |
230 | 193 |
231 if (state_ == kUninitialized) | 194 if (state_ == kUninitialized) |
232 return; | 195 return; |
233 | 196 |
234 DCHECK(!read_cb_.is_null()); | 197 DCHECK(!read_cb_.is_null()); |
(...skipping 16 matching lines...) Expand all Loading... |
251 } | 214 } |
252 | 215 |
253 ReadFromDemuxerStream(); | 216 ReadFromDemuxerStream(); |
254 return; | 217 return; |
255 } | 218 } |
256 | 219 |
257 DCHECK_EQ(status, DemuxerStream::kOk); | 220 DCHECK_EQ(status, DemuxerStream::kOk); |
258 DecodeBuffer(buffer); | 221 DecodeBuffer(buffer); |
259 } | 222 } |
260 | 223 |
261 void FFmpegVideoDecoder::DecodeBuffer( | 224 void VpxVideoDecoder::DecodeBuffer( |
262 const scoped_refptr<DecoderBuffer>& buffer) { | 225 const scoped_refptr<DecoderBuffer>& buffer) { |
263 DCHECK(message_loop_->BelongsToCurrentThread()); | 226 DCHECK(message_loop_->BelongsToCurrentThread()); |
264 DCHECK_NE(state_, kUninitialized); | 227 DCHECK_NE(state_, kUninitialized); |
265 DCHECK_NE(state_, kDecodeFinished); | 228 DCHECK_NE(state_, kDecodeFinished); |
266 DCHECK(reset_cb_.is_null()); | 229 DCHECK(reset_cb_.is_null()); |
267 DCHECK(!read_cb_.is_null()); | 230 DCHECK(!read_cb_.is_null()); |
268 DCHECK(buffer); | 231 DCHECK(buffer); |
269 | 232 |
270 // During decode, because reads are issued asynchronously, it is possible to | 233 // Transition to kDecodeFinished on the first end of stream buffer. |
271 // receive multiple end of stream buffers since each read is acked. When the | |
272 // first end of stream buffer is read, FFmpeg may still have frames queued | |
273 // up in the decoder so we need to go through the decode loop until it stops | |
274 // giving sensible data. After that, the decoder should output empty | |
275 // frames. There are three states the decoder can be in: | |
276 // | |
277 // kNormal: This is the starting state. Buffers are decoded. Decode errors | |
278 // are discarded. | |
279 // kFlushCodec: There isn't any more input data. Call avcodec_decode_video2 | |
280 // until no more data is returned to flush out remaining | |
281 // frames. The input buffer is ignored at this point. | |
282 // kDecodeFinished: All calls return empty frames. | |
283 // | |
284 // These are the possible state transitions. | |
285 // | |
286 // kNormal -> kFlushCodec: | |
287 // When buffer->IsEndOfStream() is first true. | |
288 // kNormal -> kDecodeFinished: | |
289 // A decoding error occurs and decoding needs to stop. | |
290 // kFlushCodec -> kDecodeFinished: | |
291 // When avcodec_decode_video2() returns 0 data or errors out. | |
292 // (any state) -> kNormal: | |
293 // Any time Reset() is called. | |
294 | |
295 // Transition to kFlushCodec on the first end of stream buffer. | |
296 if (state_ == kNormal && buffer->IsEndOfStream()) { | 234 if (state_ == kNormal && buffer->IsEndOfStream()) { |
297 state_ = kFlushCodec; | 235 state_ = kDecodeFinished; |
| 236 base::ResetAndReturn(&read_cb_).Run(kOk, VideoFrame::CreateEmptyFrame()); |
| 237 return; |
298 } | 238 } |
299 | 239 |
300 scoped_refptr<VideoFrame> video_frame; | 240 scoped_refptr<VideoFrame> video_frame; |
301 if (!Decode(buffer, &video_frame)) { | 241 if (!Decode(buffer, &video_frame)) { |
302 state_ = kDecodeFinished; | 242 state_ = kDecodeFinished; |
303 base::ResetAndReturn(&read_cb_).Run(kDecodeError, NULL); | 243 base::ResetAndReturn(&read_cb_).Run(kDecodeError, NULL); |
304 return; | 244 return; |
305 } | 245 } |
306 | 246 |
307 // Any successful decode counts! | 247 // Any successful decode counts! |
308 if (buffer->GetDataSize()) { | 248 if (buffer->GetDataSize()) { |
309 PipelineStatistics statistics; | 249 PipelineStatistics statistics; |
310 statistics.video_bytes_decoded = buffer->GetDataSize(); | 250 statistics.video_bytes_decoded = buffer->GetDataSize(); |
311 statistics_cb_.Run(statistics); | 251 statistics_cb_.Run(statistics); |
312 } | 252 } |
313 | 253 |
314 // If we didn't get a frame then we've either completely finished decoding or | 254 // If we didn't get a frame we need more data. |
315 // we need more data. | |
316 if (!video_frame) { | 255 if (!video_frame) { |
317 if (state_ == kFlushCodec) { | |
318 state_ = kDecodeFinished; | |
319 base::ResetAndReturn(&read_cb_).Run(kOk, VideoFrame::CreateEmptyFrame()); | |
320 return; | |
321 } | |
322 | |
323 ReadFromDemuxerStream(); | 256 ReadFromDemuxerStream(); |
324 return; | 257 return; |
325 } | 258 } |
326 | 259 |
327 base::ResetAndReturn(&read_cb_).Run(kOk, video_frame); | 260 base::ResetAndReturn(&read_cb_).Run(kOk, video_frame); |
328 } | 261 } |
329 | 262 |
330 bool FFmpegVideoDecoder::Decode( | 263 bool VpxVideoDecoder::Decode( |
331 const scoped_refptr<DecoderBuffer>& buffer, | 264 const scoped_refptr<DecoderBuffer>& buffer, |
332 scoped_refptr<VideoFrame>* video_frame) { | 265 scoped_refptr<VideoFrame>* video_frame) { |
333 DCHECK(video_frame); | 266 DCHECK(video_frame); |
334 | 267 |
335 // Create a packet for input data. | 268 // Pass |buffer| to libvpx. |
336 // Due to FFmpeg API changes we no longer have const read-only pointers. | 269 int64 timestamp = buffer->GetTimestamp().InMicroseconds(); |
337 AVPacket packet; | 270 void* user_priv = reinterpret_cast<void*>(×tamp); |
338 av_init_packet(&packet); | 271 vpx_codec_err_t status = vpx_codec_decode(vpx_codec_, |
339 packet.data = const_cast<uint8*>(buffer->GetData()); | 272 buffer->GetData(), |
340 packet.size = buffer->GetDataSize(); | 273 buffer->GetDataSize(), |
341 | 274 user_priv, |
342 // Let FFmpeg handle presentation timestamp reordering. | 275 0); |
343 codec_context_->reordered_opaque = buffer->GetTimestamp().InMicroseconds(); | 276 if (status != VPX_CODEC_OK) { |
344 | 277 LOG(ERROR) << "vpx_codec_decode() failed, status=" << status; |
345 // Reset frame to default values. | |
346 avcodec_get_frame_defaults(av_frame_); | |
347 | |
348 // This is for codecs not using get_buffer to initialize | |
349 // |av_frame_->reordered_opaque| | |
350 av_frame_->reordered_opaque = codec_context_->reordered_opaque; | |
351 | |
352 int frame_decoded = 0; | |
353 int result = avcodec_decode_video2(codec_context_, | |
354 av_frame_, | |
355 &frame_decoded, | |
356 &packet); | |
357 // Log the problem if we can't decode a video frame and exit early. | |
358 if (result < 0) { | |
359 LOG(ERROR) << "Error decoding a video frame with timestamp: " | |
360 << buffer->GetTimestamp().InMicroseconds() << " us, duration: " | |
361 << buffer->GetDuration().InMicroseconds() << " us, packet size: " | |
362 << buffer->GetDataSize() << " bytes"; | |
363 *video_frame = NULL; | |
364 return false; | 278 return false; |
365 } | 279 } |
366 | 280 |
367 // If no frame was produced then signal that more data is required to | 281 // Gets pointer to decoded data. |
368 // produce more frames. This can happen under two circumstances: | 282 vpx_codec_iter_t iter = NULL; |
369 // 1) Decoder was recently initialized/flushed | 283 const vpx_image_t* vpx_image = vpx_codec_get_frame(vpx_codec_, &iter); |
370 // 2) End of stream was reached and all internal frames have been output | 284 if (!vpx_image) { |
371 if (frame_decoded == 0) { | |
372 *video_frame = NULL; | 285 *video_frame = NULL; |
373 return true; | 286 return true; |
374 } | 287 } |
375 | 288 |
376 // TODO(fbarchard): Work around for FFmpeg http://crbug.com/27675 | 289 if (vpx_image->user_priv != reinterpret_cast<void*>(×tamp)) { |
377 // The decoder is in a bad state and not decoding correctly. | 290 LOG(ERROR) << "Invalid output timestamp."; |
378 // Checking for NULL avoids a crash in CopyPlane(). | |
379 if (!av_frame_->data[VideoFrame::kYPlane] || | |
380 !av_frame_->data[VideoFrame::kUPlane] || | |
381 !av_frame_->data[VideoFrame::kVPlane]) { | |
382 LOG(ERROR) << "Video frame was produced yet has invalid frame data."; | |
383 *video_frame = NULL; | |
384 return false; | 291 return false; |
385 } | 292 } |
386 | 293 |
387 if (!av_frame_->opaque) { | 294 CopyVpxImageTo(vpx_image, video_frame); |
388 LOG(ERROR) << "VideoFrame object associated with frame data not set."; | 295 (*video_frame)->SetTimestamp(base::TimeDelta::FromMicroseconds(timestamp)); |
389 return false; | |
390 } | |
391 *video_frame = static_cast<VideoFrame*>(av_frame_->opaque); | |
392 | |
393 (*video_frame)->SetTimestamp( | |
394 base::TimeDelta::FromMicroseconds(av_frame_->reordered_opaque)); | |
395 | |
396 return true; | 296 return true; |
397 } | 297 } |
398 | 298 |
399 void FFmpegVideoDecoder::ReleaseFFmpegResources() { | 299 void VpxVideoDecoder::DoReset() { |
400 if (codec_context_) { | 300 DCHECK(read_cb_.is_null()); |
401 av_free(codec_context_->extradata); | 301 |
402 avcodec_close(codec_context_); | 302 state_ = kNormal; |
403 av_free(codec_context_); | 303 reset_cb_.Run(); |
404 codec_context_ = NULL; | 304 reset_cb_.Reset(); |
405 } | |
406 if (av_frame_) { | |
407 av_free(av_frame_); | |
408 av_frame_ = NULL; | |
409 } | |
410 } | 305 } |
411 | 306 |
412 bool FFmpegVideoDecoder::ConfigureDecoder() { | 307 void VpxVideoDecoder::CopyVpxImageTo( |
413 const VideoDecoderConfig& config = demuxer_stream_->video_decoder_config(); | 308 const vpx_image* vpx_image, |
| 309 scoped_refptr<VideoFrame>* video_frame) { |
| 310 CHECK(vpx_image); |
| 311 CHECK_EQ(vpx_image->d_w % 2, 0U); |
| 312 CHECK_EQ(vpx_image->d_h % 2, 0U); |
| 313 CHECK(vpx_image->fmt == VPX_IMG_FMT_I420 || |
| 314 vpx_image->fmt == VPX_IMG_FMT_YV12); |
414 | 315 |
415 if (!config.IsValidConfig()) { | 316 gfx::Size size(vpx_image->d_w, vpx_image->d_h); |
416 DLOG(ERROR) << "Invalid video stream - " << config.AsHumanReadableString(); | 317 gfx::Size natural_size = |
417 return false; | 318 demuxer_stream_->video_decoder_config().natural_size(); |
418 } | |
419 | 319 |
420 if (config.is_encrypted()) { | 320 *video_frame = VideoFrame::CreateFrame(VideoFrame::YV12, |
421 DLOG(ERROR) << "Encrypted video stream not supported."; | 321 size, |
422 return false; | 322 gfx::Rect(size), |
423 } | 323 natural_size, |
424 | 324 kNoTimestamp()); |
425 // Release existing decoder resources if necessary. | 325 CopyYPlane(vpx_image->planes[VPX_PLANE_Y], |
426 ReleaseFFmpegResources(); | 326 vpx_image->stride[VPX_PLANE_Y], |
427 | 327 vpx_image->d_h, |
428 // Initialize AVCodecContext structure. | 328 *video_frame); |
429 codec_context_ = avcodec_alloc_context3(NULL); | 329 CopyUPlane(vpx_image->planes[VPX_PLANE_U], |
430 VideoDecoderConfigToAVCodecContext(config, codec_context_); | 330 vpx_image->stride[VPX_PLANE_U], |
431 | 331 vpx_image->d_h / 2, |
432 // Enable motion vector search (potentially slow), strong deblocking filter | 332 *video_frame); |
433 // for damaged macroblocks, and set our error detection sensitivity. | 333 CopyVPlane(vpx_image->planes[VPX_PLANE_V], |
434 codec_context_->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK; | 334 vpx_image->stride[VPX_PLANE_V], |
435 codec_context_->thread_count = GetThreadCount(codec_context_->codec_id); | 335 vpx_image->d_h / 2, |
436 codec_context_->opaque = this; | 336 *video_frame); |
437 codec_context_->flags |= CODEC_FLAG_EMU_EDGE; | |
438 codec_context_->get_buffer = GetVideoBufferImpl; | |
439 codec_context_->release_buffer = ReleaseVideoBufferImpl; | |
440 | |
441 AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id); | |
442 if (!codec || avcodec_open2(codec_context_, codec, NULL) < 0) { | |
443 ReleaseFFmpegResources(); | |
444 return false; | |
445 } | |
446 | |
447 av_frame_ = avcodec_alloc_frame(); | |
448 return true; | |
449 } | 337 } |
450 | 338 |
451 } // namespace media | 339 } // namespace media |
OLD | NEW |