OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/filters/ffmpeg_video_decoder.h" | 5 #include "media/filters/ffmpeg_video_decoder.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <string> | 8 #include <string> |
9 | 9 |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
11 #include "base/callback_helpers.h" | 11 #include "base/callback_helpers.h" |
12 #include "base/command_line.h" | 12 #include "base/command_line.h" |
13 #include "base/location.h" | 13 #include "base/location.h" |
14 #include "base/message_loop_proxy.h" | 14 #include "base/message_loop_proxy.h" |
15 #include "base/string_number_conversions.h" | 15 #include "base/string_number_conversions.h" |
16 #include "media/base/bind_to_loop.h" | 16 #include "media/base/bind_to_loop.h" |
17 #include "media/base/decoder_buffer.h" | 17 #include "media/base/decoder_buffer.h" |
18 #include "media/base/demuxer_stream.h" | |
19 #include "media/base/limits.h" | 18 #include "media/base/limits.h" |
20 #include "media/base/media_switches.h" | 19 #include "media/base/media_switches.h" |
21 #include "media/base/pipeline.h" | 20 #include "media/base/pipeline.h" |
22 #include "media/base/video_decoder_config.h" | 21 #include "media/base/video_decoder_config.h" |
23 #include "media/base/video_frame.h" | 22 #include "media/base/video_frame.h" |
24 #include "media/base/video_util.h" | 23 #include "media/base/video_util.h" |
25 #include "media/ffmpeg/ffmpeg_common.h" | 24 #include "media/ffmpeg/ffmpeg_common.h" |
26 #include "media/filters/ffmpeg_glue.h" | 25 #include "media/filters/ffmpeg_glue.h" |
27 | 26 |
28 namespace media { | 27 namespace media { |
(...skipping 25 matching lines...) |
54 decode_threads = std::min(decode_threads, kMaxDecodeThreads); | 53 decode_threads = std::min(decode_threads, kMaxDecodeThreads); |
55 return decode_threads; | 54 return decode_threads; |
56 } | 55 } |
57 | 56 |
58 FFmpegVideoDecoder::FFmpegVideoDecoder( | 57 FFmpegVideoDecoder::FFmpegVideoDecoder( |
59 const scoped_refptr<base::MessageLoopProxy>& message_loop) | 58 const scoped_refptr<base::MessageLoopProxy>& message_loop) |
60 : message_loop_(message_loop), | 59 : message_loop_(message_loop), |
61 weak_factory_(this), | 60 weak_factory_(this), |
62 state_(kUninitialized), | 61 state_(kUninitialized), |
63 codec_context_(NULL), | 62 codec_context_(NULL), |
64 av_frame_(NULL), | 63 av_frame_(NULL) { |
65 demuxer_stream_(NULL) { | |
66 } | 64 } |
67 | 65 |
68 int FFmpegVideoDecoder::GetVideoBuffer(AVCodecContext* codec_context, | 66 int FFmpegVideoDecoder::GetVideoBuffer(AVCodecContext* codec_context, |
69 AVFrame* frame) { | 67 AVFrame* frame) { |
70 // Don't use |codec_context_| here! With threaded decoding, | 68 // Don't use |codec_context_| here! With threaded decoding, |
71 // it will contain unsynchronized width/height/pix_fmt values, | 69 // it will contain unsynchronized width/height/pix_fmt values, |
72 // whereas |codec_context| contains the current thread's | 70 // whereas |codec_context| contains the current thread's |
73 // updated width/height/pix_fmt, which can change for adaptive | 71 // updated width/height/pix_fmt, which can change for adaptive |
74 // content. | 72 // content. |
75 VideoFrame::Format format = PixelFormatToVideoFormat(codec_context->pix_fmt); | 73 VideoFrame::Format format = PixelFormatToVideoFormat(codec_context->pix_fmt); |
76 if (format == VideoFrame::INVALID) | 74 if (format == VideoFrame::INVALID) |
77 return AVERROR(EINVAL); | 75 return AVERROR(EINVAL); |
78 DCHECK(format == VideoFrame::YV12 || format == VideoFrame::YV16); | 76 DCHECK(format == VideoFrame::YV12 || format == VideoFrame::YV16); |
79 | 77 |
80 gfx::Size size(codec_context->width, codec_context->height); | 78 gfx::Size size(codec_context->width, codec_context->height); |
81 int ret; | 79 int ret; |
82 if ((ret = av_image_check_size(size.width(), size.height(), 0, NULL)) < 0) | 80 if ((ret = av_image_check_size(size.width(), size.height(), 0, NULL)) < 0) |
83 return ret; | 81 return ret; |
84 | 82 |
85 gfx::Size natural_size; | 83 gfx::Size natural_size; |
86 if (codec_context->sample_aspect_ratio.num > 0) { | 84 if (codec_context->sample_aspect_ratio.num > 0) { |
87 natural_size = GetNaturalSize(size, | 85 natural_size = GetNaturalSize(size, |
88 codec_context->sample_aspect_ratio.num, | 86 codec_context->sample_aspect_ratio.num, |
89 codec_context->sample_aspect_ratio.den); | 87 codec_context->sample_aspect_ratio.den); |
90 } else { | 88 } else { |
91 natural_size = demuxer_stream_->video_decoder_config().natural_size(); | 89 natural_size = config_.natural_size(); |
92 } | 90 } |
93 | 91 |
94 if (!VideoFrame::IsValidConfig(format, size, gfx::Rect(size), natural_size)) | 92 if (!VideoFrame::IsValidConfig(format, size, gfx::Rect(size), natural_size)) |
95 return AVERROR(EINVAL); | 93 return AVERROR(EINVAL); |
96 | 94 |
97 scoped_refptr<VideoFrame> video_frame = | 95 scoped_refptr<VideoFrame> video_frame = |
98 VideoFrame::CreateFrame(format, size, gfx::Rect(size), natural_size, | 96 VideoFrame::CreateFrame(format, size, gfx::Rect(size), natural_size, |
99 kNoTimestamp()); | 97 kNoTimestamp()); |
100 | 98 |
101 for (int i = 0; i < 3; i++) { | 99 for (int i = 0; i < 3; i++) { |
(...skipping 22 matching lines...) |
124 static void ReleaseVideoBufferImpl(AVCodecContext* s, AVFrame* frame) { | 122 static void ReleaseVideoBufferImpl(AVCodecContext* s, AVFrame* frame) { |
125 scoped_refptr<VideoFrame> video_frame; | 123 scoped_refptr<VideoFrame> video_frame; |
126 video_frame.swap(reinterpret_cast<VideoFrame**>(&frame->opaque)); | 124 video_frame.swap(reinterpret_cast<VideoFrame**>(&frame->opaque)); |
127 | 125 |
128 // The FFmpeg API expects us to zero the data pointers in | 126 // The FFmpeg API expects us to zero the data pointers in |
129 // this callback. | 127 // this callback. |
130 memset(frame->data, 0, sizeof(frame->data)); | 128 memset(frame->data, 0, sizeof(frame->data)); |
131 frame->opaque = NULL; | 129 frame->opaque = NULL; |
132 } | 130 } |
133 | 131 |
134 void FFmpegVideoDecoder::Initialize(DemuxerStream* stream, | 132 void FFmpegVideoDecoder::Initialize(const VideoDecoderConfig& config, |
135 const PipelineStatusCB& status_cb, | 133 const PipelineStatusCB& status_cb, |
136 const StatisticsCB& statistics_cb) { | 134 const StatisticsCB& statistics_cb) { |
137 DCHECK(message_loop_->BelongsToCurrentThread()); | 135 DCHECK(message_loop_->BelongsToCurrentThread()); |
138 DCHECK(stream); | |
139 DCHECK(read_cb_.is_null()); | 136 DCHECK(read_cb_.is_null()); |
140 DCHECK(reset_cb_.is_null()); | 137 DCHECK(reset_cb_.is_null()); |
| 138 DCHECK(config.IsValidConfig()); |
| 139 DCHECK(!config.is_encrypted()); |
141 | 140 |
142 FFmpegGlue::InitializeFFmpeg(); | 141 FFmpegGlue::InitializeFFmpeg(); |
143 weak_this_ = weak_factory_.GetWeakPtr(); | 142 weak_this_ = weak_factory_.GetWeakPtr(); |
144 | 143 |
145 demuxer_stream_ = stream; | 144 config_ = config; |
146 statistics_cb_ = statistics_cb; | 145 statistics_cb_ = statistics_cb; |
147 PipelineStatusCB initialize_cb = BindToCurrentLoop(status_cb); | 146 PipelineStatusCB initialize_cb = BindToCurrentLoop(status_cb); |
148 | 147 |
149 if (!ConfigureDecoder()) { | 148 if (!ConfigureDecoder()) { |
150 initialize_cb.Run(DECODER_ERROR_NOT_SUPPORTED); | 149 initialize_cb.Run(DECODER_ERROR_NOT_SUPPORTED); |
151 return; | 150 return; |
152 } | 151 } |
153 | 152 |
154 // Success! | 153 // Success! |
155 state_ = kNormal; | 154 state_ = kNormal; |
156 initialize_cb.Run(PIPELINE_OK); | 155 initialize_cb.Run(PIPELINE_OK); |
157 } | 156 } |
158 | 157 |
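The runtime validity and encryption checks this CL deletes from ConfigureDecoder() further down survive here only as DCHECKs, so in release builds the caller is expected to reject unusable configs before calling Initialize(). A minimal caller-side sketch, assuming `stream`, `status_cb` and `statistics_cb` are already in scope (this selection logic is not part of this CL):

    // Assumed caller-side guard; with this CL the decoder itself only DCHECKs.
    const VideoDecoderConfig& config = stream->video_decoder_config();
    if (!config.IsValidConfig() || config.is_encrypted()) {
      status_cb.Run(DECODER_ERROR_NOT_SUPPORTED);  // fall back to another decoder
      return;
    }
    video_decoder->Initialize(config, status_cb, statistics_cb);
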
159 void FFmpegVideoDecoder::Read(const ReadCB& read_cb) { | 158 void FFmpegVideoDecoder::Decode(const scoped_refptr<DecoderBuffer>& buffer, |
| 159 const ReadCB& read_cb) { |
160 DCHECK(message_loop_->BelongsToCurrentThread()); | 160 DCHECK(message_loop_->BelongsToCurrentThread()); |
161 DCHECK(!read_cb.is_null()); | 161 DCHECK(!read_cb.is_null()); |
162 CHECK_NE(state_, kUninitialized); | 162 CHECK_NE(state_, kUninitialized); |
163 CHECK(read_cb_.is_null()) << "Overlapping decodes are not supported."; | 163 CHECK(read_cb_.is_null()) << "Overlapping decodes are not supported."; |
164 read_cb_ = BindToCurrentLoop(read_cb); | 164 read_cb_ = BindToCurrentLoop(read_cb); |
165 | 165 |
166 if (state_ == kError) { | 166 if (state_ == kError) { |
167 base::ResetAndReturn(&read_cb_).Run(kDecodeError, NULL); | 167 base::ResetAndReturn(&read_cb_).Run(kDecodeError, NULL); |
168 return; | 168 return; |
169 } | 169 } |
170 | 170 |
171 // Return empty frames if decoding has finished. | 171 // Return empty frames if decoding has finished. |
172 if (state_ == kDecodeFinished) { | 172 if (state_ == kDecodeFinished) { |
173 base::ResetAndReturn(&read_cb_).Run(kOk, VideoFrame::CreateEmptyFrame()); | 173 base::ResetAndReturn(&read_cb_).Run(kOk, VideoFrame::CreateEmptyFrame()); |
174 return; | 174 return; |
175 } | 175 } |
176 | 176 |
177 ReadFromDemuxerStream(); | 177 DecodeBuffer(buffer); |
178 } | 178 } |
179 | 179 |
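With Read() plus the internal demuxer pull replaced by Decode(buffer, read_cb), whatever sat on the other side of ReadFromDemuxerStream() now pushes buffers into the decoder. A hedged sketch of the new calling pattern, assuming a VideoFrameStream-style driver; VideoFrameStreamish, GetNextBuffer(), DeliverFrame() and OnDecodeError() are illustrative names, not part of this CL:

    // Hypothetical driver; the weak pointer plumbing and the helpers named
    // above are assumed, not part of this change.
    void VideoFrameStreamish::DecodeNext() {
      scoped_refptr<DecoderBuffer> buffer = GetNextBuffer();  // demuxed elsewhere
      decoder_->Decode(buffer, base::Bind(&VideoFrameStreamish::OnFrameReady,
                                          weak_this_));
    }

    void VideoFrameStreamish::OnFrameReady(VideoDecoder::Status status,
                                           const scoped_refptr<VideoFrame>& frame) {
      if (status == VideoDecoder::kNotEnoughData) {
        DecodeNext();  // no frame produced yet; push the next buffer
        return;
      }
      if (status != VideoDecoder::kOk) {
        OnDecodeError();  // kDecodeError: surface the failure to the pipeline
        return;
      }
      DeliverFrame(frame);
    }
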
180 void FFmpegVideoDecoder::Reset(const base::Closure& closure) { | 180 void FFmpegVideoDecoder::Reset(const base::Closure& closure) { |
181 DCHECK(message_loop_->BelongsToCurrentThread()); | 181 DCHECK(message_loop_->BelongsToCurrentThread()); |
182 DCHECK(reset_cb_.is_null()); | 182 DCHECK(reset_cb_.is_null()); |
183 reset_cb_ = BindToCurrentLoop(closure); | 183 reset_cb_ = BindToCurrentLoop(closure); |
184 | 184 |
185 // Defer the reset if a read is pending. | 185 // Defer the reset if a read is pending. |
186 if (!read_cb_.is_null()) | 186 if (!read_cb_.is_null()) |
187 return; | 187 return; |
(...skipping 22 matching lines...) |
210 ReleaseFFmpegResources(); | 210 ReleaseFFmpegResources(); |
211 state_ = kUninitialized; | 211 state_ = kUninitialized; |
212 } | 212 } |
213 | 213 |
214 FFmpegVideoDecoder::~FFmpegVideoDecoder() { | 214 FFmpegVideoDecoder::~FFmpegVideoDecoder() { |
215 DCHECK_EQ(kUninitialized, state_); | 215 DCHECK_EQ(kUninitialized, state_); |
216 DCHECK(!codec_context_); | 216 DCHECK(!codec_context_); |
217 DCHECK(!av_frame_); | 217 DCHECK(!av_frame_); |
218 } | 218 } |
219 | 219 |
220 void FFmpegVideoDecoder::ReadFromDemuxerStream() { | |
221 DCHECK_NE(state_, kUninitialized); | |
222 DCHECK_NE(state_, kDecodeFinished); | |
223 DCHECK_NE(state_, kError); | |
224 DCHECK(!read_cb_.is_null()); | |
225 | |
226 demuxer_stream_->Read(base::Bind( | |
227 &FFmpegVideoDecoder::BufferReady, weak_this_)); | |
228 } | |
229 | |
230 void FFmpegVideoDecoder::BufferReady( | |
231 DemuxerStream::Status status, | |
232 const scoped_refptr<DecoderBuffer>& buffer) { | |
233 DCHECK(message_loop_->BelongsToCurrentThread()); | |
234 DCHECK_NE(state_, kDecodeFinished); | |
235 DCHECK_NE(state_, kError); | |
236 DCHECK_EQ(status != DemuxerStream::kOk, !buffer) << status; | |
237 | |
238 if (state_ == kUninitialized) | |
239 return; | |
240 | |
241 DCHECK(!read_cb_.is_null()); | |
242 | |
243 if (!reset_cb_.is_null()) { | |
244 base::ResetAndReturn(&read_cb_).Run(kOk, NULL); | |
245 DoReset(); | |
246 return; | |
247 } | |
248 | |
249 if (status == DemuxerStream::kAborted) { | |
250 base::ResetAndReturn(&read_cb_).Run(kOk, NULL); | |
251 return; | |
252 } | |
253 | |
254 // VideoFrameStream ensures no kConfigChanged is passed to VideoDecoders. | |
255 DCHECK_EQ(status, DemuxerStream::kOk) << status; | |
256 DecodeBuffer(buffer); | |
257 } | |
258 | |
259 void FFmpegVideoDecoder::DecodeBuffer( | 220 void FFmpegVideoDecoder::DecodeBuffer( |
260 const scoped_refptr<DecoderBuffer>& buffer) { | 221 const scoped_refptr<DecoderBuffer>& buffer) { |
261 DCHECK(message_loop_->BelongsToCurrentThread()); | 222 DCHECK(message_loop_->BelongsToCurrentThread()); |
262 DCHECK_NE(state_, kUninitialized); | 223 DCHECK_NE(state_, kUninitialized); |
263 DCHECK_NE(state_, kDecodeFinished); | 224 DCHECK_NE(state_, kDecodeFinished); |
264 DCHECK_NE(state_, kError); | 225 DCHECK_NE(state_, kError); |
265 DCHECK(reset_cb_.is_null()); | 226 DCHECK(reset_cb_.is_null()); |
266 DCHECK(!read_cb_.is_null()); | 227 DCHECK(!read_cb_.is_null()); |
267 DCHECK(buffer); | |
268 | 228 |
269 // During decode, because reads are issued asynchronously, it is possible to | 229 // During decode, because reads are issued asynchronously, it is possible to |
270 // receive multiple end of stream buffers since each read is acked. When the | 230 // receive multiple end of stream buffers since each read is acked. When the |
271 // first end of stream buffer is read, FFmpeg may still have frames queued | 231 // first end of stream buffer is read, FFmpeg may still have frames queued |
272 // up in the decoder so we need to go through the decode loop until it stops | 232 // up in the decoder so we need to go through the decode loop until it stops |
273 // giving sensible data. After that, the decoder should output empty | 233 // giving sensible data. After that, the decoder should output empty |
274 // frames. There are three states the decoder can be in: | 234 // frames. There are three states the decoder can be in: |
275 // | 235 // |
276 // kNormal: This is the starting state. Buffers are decoded. Decode errors | 236 // kNormal: This is the starting state. Buffers are decoded. Decode errors |
277 // are discarded. | 237 // are discarded. |
(...skipping 15 matching lines...) |
293 // When avcodec_decode_video2() errors out. | 253 // When avcodec_decode_video2() errors out. |
294 // (any state) -> kNormal: | 254 // (any state) -> kNormal: |
295 // Any time Reset() is called. | 255 // Any time Reset() is called. |
296 | 256 |
297 // Transition to kFlushCodec on the first end of stream buffer. | 257 // Transition to kFlushCodec on the first end of stream buffer. |
298 if (state_ == kNormal && buffer->IsEndOfStream()) { | 258 if (state_ == kNormal && buffer->IsEndOfStream()) { |
299 state_ = kFlushCodec; | 259 state_ = kFlushCodec; |
300 } | 260 } |
301 | 261 |
302 scoped_refptr<VideoFrame> video_frame; | 262 scoped_refptr<VideoFrame> video_frame; |
303 if (!Decode(buffer, &video_frame)) { | 263 if (!FFmpegDecode(buffer, &video_frame)) { |
304 state_ = kError; | 264 state_ = kError; |
305 base::ResetAndReturn(&read_cb_).Run(kDecodeError, NULL); | 265 base::ResetAndReturn(&read_cb_).Run(kDecodeError, NULL); |
306 return; | 266 return; |
307 } | 267 } |
308 | 268 |
309 // Any successful decode counts! | 269 // Any successful decode counts! |
310 if (!buffer->IsEndOfStream() && buffer->GetDataSize() > 0) { | 270 if (!buffer->IsEndOfStream() && buffer->GetDataSize() > 0) { |
311 PipelineStatistics statistics; | 271 PipelineStatistics statistics; |
312 statistics.video_bytes_decoded = buffer->GetDataSize(); | 272 statistics.video_bytes_decoded = buffer->GetDataSize(); |
313 statistics_cb_.Run(statistics); | 273 statistics_cb_.Run(statistics); |
314 } | 274 } |
315 | 275 |
316 if (!video_frame) { | 276 if (!video_frame) { |
317 if (state_ == kFlushCodec) { | 277 if (state_ == kFlushCodec) { |
318 DCHECK(buffer->IsEndOfStream()); | 278 DCHECK(buffer->IsEndOfStream()); |
319 state_ = kDecodeFinished; | 279 state_ = kDecodeFinished; |
320 base::ResetAndReturn(&read_cb_).Run(kOk, VideoFrame::CreateEmptyFrame()); | 280 base::ResetAndReturn(&read_cb_).Run(kOk, VideoFrame::CreateEmptyFrame()); |
321 return; | 281 return; |
322 } | 282 } |
323 | 283 |
324 ReadFromDemuxerStream(); | 284 base::ResetAndReturn(&read_cb_).Run(kNotEnoughData, NULL); |
325 return; | 285 return; |
326 } | 286 } |
327 | 287 |
328 base::ResetAndReturn(&read_cb_).Run(kOk, video_frame); | 288 base::ResetAndReturn(&read_cb_).Run(kOk, video_frame); |
329 } | 289 } |
330 | 290 |
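Tying back to the state comment above: once the demuxed data runs out, the caller keeps handing in end-of-stream buffers so FFmpeg can drain its queued frames, and when the decoder reaches kDecodeFinished it answers with an empty frame. A hedged drain-loop sketch using the same illustrative VideoFrameStreamish names as above (DecoderBuffer::CreateEOSBuffer() and VideoFrame::IsEndOfStream() are assumed to be available in this revision of the media code):

    // Illustrative drain loop once the demuxer signals end of stream.
    void VideoFrameStreamish::Flush() {
      decoder_->Decode(DecoderBuffer::CreateEOSBuffer(),
                       base::Bind(&VideoFrameStreamish::OnFlushFrame, weak_this_));
    }

    void VideoFrameStreamish::OnFlushFrame(VideoDecoder::Status status,
                                           const scoped_refptr<VideoFrame>& frame) {
      if (status == VideoDecoder::kOk && !frame->IsEndOfStream()) {
        DeliverFrame(frame);  // the codec still had a queued frame
        Flush();              // drain again with another EOS buffer
        return;
      }
      // Empty frame (or error): the decoder has reached kDecodeFinished.
      OnFlushDone();  // assumed helper
    }
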
331 bool FFmpegVideoDecoder::Decode( | 291 bool FFmpegVideoDecoder::FFmpegDecode( |
332 const scoped_refptr<DecoderBuffer>& buffer, | 292 const scoped_refptr<DecoderBuffer>& buffer, |
333 scoped_refptr<VideoFrame>* video_frame) { | 293 scoped_refptr<VideoFrame>* video_frame) { |
334 DCHECK(video_frame); | 294 DCHECK(video_frame); |
335 | 295 |
336 // Reset frame to default values. | 296 // Reset frame to default values. |
337 avcodec_get_frame_defaults(av_frame_); | 297 avcodec_get_frame_defaults(av_frame_); |
338 | 298 |
339 // Create a packet for input data. | 299 // Create a packet for input data. |
340 // Due to FFmpeg API changes we no longer have const read-only pointers. | 300 // Due to FFmpeg API changes we no longer have const read-only pointers. |
341 AVPacket packet; | 301 AVPacket packet; |
(...skipping 64 matching lines...) |
406 av_free(codec_context_); | 366 av_free(codec_context_); |
407 codec_context_ = NULL; | 367 codec_context_ = NULL; |
408 } | 368 } |
409 if (av_frame_) { | 369 if (av_frame_) { |
410 av_free(av_frame_); | 370 av_free(av_frame_); |
411 av_frame_ = NULL; | 371 av_frame_ = NULL; |
412 } | 372 } |
413 } | 373 } |
414 | 374 |
415 bool FFmpegVideoDecoder::ConfigureDecoder() { | 375 bool FFmpegVideoDecoder::ConfigureDecoder() { |
416 const VideoDecoderConfig& config = demuxer_stream_->video_decoder_config(); | |
417 | |
418 if (!config.IsValidConfig()) { | |
419 DLOG(ERROR) << "Invalid video stream - " << config.AsHumanReadableString(); | |
420 return false; | |
421 } | |
422 | |
423 if (config.is_encrypted()) { | |
424 DLOG(ERROR) << "Encrypted video stream not supported."; | |
425 return false; | |
426 } | |
427 | |
428 // Release existing decoder resources if necessary. | 376 // Release existing decoder resources if necessary. |
429 ReleaseFFmpegResources(); | 377 ReleaseFFmpegResources(); |
430 | 378 |
431 // Initialize AVCodecContext structure. | 379 // Initialize AVCodecContext structure. |
432 codec_context_ = avcodec_alloc_context3(NULL); | 380 codec_context_ = avcodec_alloc_context3(NULL); |
433 VideoDecoderConfigToAVCodecContext(config, codec_context_); | 381 VideoDecoderConfigToAVCodecContext(config_, codec_context_); |
434 | 382 |
435 // Enable motion vector search (potentially slow), strong deblocking filter | 383 // Enable motion vector search (potentially slow), strong deblocking filter |
436 // for damaged macroblocks, and set our error detection sensitivity. | 384 // for damaged macroblocks, and set our error detection sensitivity. |
437 codec_context_->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK; | 385 codec_context_->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK; |
438 codec_context_->thread_count = GetThreadCount(codec_context_->codec_id); | 386 codec_context_->thread_count = GetThreadCount(codec_context_->codec_id); |
439 codec_context_->opaque = this; | 387 codec_context_->opaque = this; |
440 codec_context_->flags |= CODEC_FLAG_EMU_EDGE; | 388 codec_context_->flags |= CODEC_FLAG_EMU_EDGE; |
441 codec_context_->get_buffer = GetVideoBufferImpl; | 389 codec_context_->get_buffer = GetVideoBufferImpl; |
442 codec_context_->release_buffer = ReleaseVideoBufferImpl; | 390 codec_context_->release_buffer = ReleaseVideoBufferImpl; |
443 | 391 |
444 AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id); | 392 AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id); |
445 if (!codec || avcodec_open2(codec_context_, codec, NULL) < 0) { | 393 if (!codec || avcodec_open2(codec_context_, codec, NULL) < 0) { |
446 ReleaseFFmpegResources(); | 394 ReleaseFFmpegResources(); |
447 return false; | 395 return false; |
448 } | 396 } |
449 | 397 |
450 av_frame_ = avcodec_alloc_frame(); | 398 av_frame_ = avcodec_alloc_frame(); |
451 return true; | 399 return true; |
452 } | 400 } |
453 | 401 |
454 } // namespace media | 402 } // namespace media |