Index: media/filters/ffmpeg_video_decoder.cc |
=================================================================== |
--- media/filters/ffmpeg_video_decoder.cc (revision 277175) |
+++ media/filters/ffmpeg_video_decoder.cc (working copy) |
@@ -151,11 +151,10 @@ |
void FFmpegVideoDecoder::Initialize(const VideoDecoderConfig& config, |
bool low_delay, |
- const PipelineStatusCB& status_cb, |
- const OutputCB& output_cb) { |
+ const PipelineStatusCB& status_cb) { |
DCHECK(task_runner_->BelongsToCurrentThread()); |
+ DCHECK(decode_cb_.is_null()); |
DCHECK(!config.is_encrypted()); |
- DCHECK(!output_cb.is_null()); |
FFmpegGlue::InitializeFFmpeg(); |
@@ -167,8 +166,6 @@ |
return; |
} |
- output_cb_ = BindToCurrentLoop(output_cb); |
- |
// Success! |
state_ = kNormal; |
initialize_cb.Run(PIPELINE_OK); |
@@ -177,25 +174,59 @@ |
// Decodes |buffer| and reports the result through |decode_cb|. Exactly one
// decode may be outstanding at a time; the callback is stored in |decode_cb_|
// and fired (via BindToCurrentLoop, so presumably on the calling loop — the
// posted-task semantics live in that helper) with either an error, an
// end-of-stream frame, or the decoded frame from DecodeBuffer().
void FFmpegVideoDecoder::Decode(const scoped_refptr<DecoderBuffer>& buffer,
                                const DecodeCB& decode_cb) {
  DCHECK(task_runner_->BelongsToCurrentThread());
  DCHECK(!decode_cb.is_null());
  CHECK_NE(state_, kUninitialized);
  // Only a single in-flight decode is supported; a second Decode() before the
  // first completes is a caller contract violation.
  CHECK(decode_cb_.is_null()) << "Overlapping decodes are not supported.";
  decode_cb_ = BindToCurrentLoop(decode_cb);

  // Once an unrecoverable error has been seen, every decode fails immediately.
  if (state_ == kError) {
    base::ResetAndReturn(&decode_cb_).Run(kDecodeError, NULL);
    return;
  }

  // Return empty frames if decoding has finished.
  if (state_ == kDecodeFinished) {
    base::ResetAndReturn(&decode_cb_).Run(kOk, VideoFrame::CreateEOSFrame());
    return;
  }

  // Normal (or flushing) path: run the buffer through FFmpeg. DecodeBuffer()
  // is responsible for consuming |decode_cb_|.
  DecodeBuffer(buffer);
}
// Discards all queued/buffered codec state and returns the decoder to
// kNormal so decoding can restart (e.g. after a seek). Must not be called
// with a decode in flight — |decode_cb_| has to be null here. Completion is
// signaled asynchronously by posting |closure| to |task_runner_|.
void FFmpegVideoDecoder::Reset(const base::Closure& closure) {
  DCHECK(task_runner_->BelongsToCurrentThread());
  DCHECK(decode_cb_.is_null());

  // Drop any frames FFmpeg is holding internally.
  avcodec_flush_buffers(codec_context_.get());
  state_ = kNormal;
  // Post rather than run inline so the caller never re-enters synchronously.
  task_runner_->PostTask(FROM_HERE, closure);
}
+ |
// Tears down the FFmpeg codec state and returns to kUninitialized.
// Idempotent: calling Stop() twice, or before Initialize() succeeded, is a
// no-op because of the kUninitialized early-out.
void FFmpegVideoDecoder::Stop() {
  DCHECK(task_runner_->BelongsToCurrentThread());

  if (state_ == kUninitialized)
    return;

  ReleaseFFmpegResources();
  state_ = kUninitialized;
}
+ |
// Destruction requires that Stop() already ran: the state must be
// kUninitialized and all FFmpeg resources (codec context, frame) released —
// the destructor itself frees nothing.
FFmpegVideoDecoder::~FFmpegVideoDecoder() {
  DCHECK_EQ(kUninitialized, state_);
  DCHECK(!codec_context_);
  DCHECK(!av_frame_);
}
+ |
+void FFmpegVideoDecoder::DecodeBuffer( |
+ const scoped_refptr<DecoderBuffer>& buffer) { |
+ DCHECK(task_runner_->BelongsToCurrentThread()); |
+ DCHECK_NE(state_, kUninitialized); |
+ DCHECK_NE(state_, kDecodeFinished); |
+ DCHECK_NE(state_, kError); |
+ DCHECK(!decode_cb_.is_null()); |
+ DCHECK(buffer); |
+ |
// During decode, because reads are issued asynchronously, it is possible to |
// receive multiple end of stream buffers since each decode is acked. When the |
// first end of stream buffer is read, FFmpeg may still have frames queued |
@@ -205,65 +236,57 @@ |
// |
// kNormal: This is the starting state. Buffers are decoded. Decode errors |
// are discarded. |
+ // kFlushCodec: There isn't any more input data. Call avcodec_decode_video2 |
+ // until no more data is returned to flush out remaining |
+ // frames. The input buffer is ignored at this point. |
// kDecodeFinished: All calls return empty frames. |
// kError: Unexpected error happened. |
// |
// These are the possible state transitions. |
// |
- // kNormal -> kDecodeFinished: |
- // When EOS buffer is received and the codec has been flushed. |
+ // kNormal -> kFlushCodec: |
+ // When buffer->end_of_stream() is first true. |
// kNormal -> kError: |
// A decoding error occurs and decoding needs to stop. |
+ // kFlushCodec -> kDecodeFinished: |
+ // When avcodec_decode_video2() returns 0 data. |
+ // kFlushCodec -> kError: |
+ // When avcodec_decode_video2() errors out. |
// (any state) -> kNormal: |
// Any time Reset() is called. |
- bool has_produced_frame; |
- do { |
- has_produced_frame = false; |
- if (!FFmpegDecode(buffer, &has_produced_frame)) { |
- state_ = kError; |
- decode_cb_bound.Run(kDecodeError); |
- return; |
- } |
- // Repeat to flush the decoder after receiving EOS buffer. |
- } while (buffer->end_of_stream() && has_produced_frame); |
+ // Transition to kFlushCodec on the first end of stream buffer. |
+ if (state_ == kNormal && buffer->end_of_stream()) { |
+ state_ = kFlushCodec; |
+ } |
- if (buffer->end_of_stream()) { |
- output_cb_.Run(VideoFrame::CreateEOSFrame()); |
- state_ = kDecodeFinished; |
+ scoped_refptr<VideoFrame> video_frame; |
+ if (!FFmpegDecode(buffer, &video_frame)) { |
+ state_ = kError; |
+ base::ResetAndReturn(&decode_cb_).Run(kDecodeError, NULL); |
+ return; |
} |
- decode_cb_bound.Run(kOk); |
-} |
+ if (!video_frame.get()) { |
+ if (state_ == kFlushCodec) { |
+ DCHECK(buffer->end_of_stream()); |
+ state_ = kDecodeFinished; |
+ base::ResetAndReturn(&decode_cb_) |
+ .Run(kOk, VideoFrame::CreateEOSFrame()); |
+ return; |
+ } |
-void FFmpegVideoDecoder::Reset(const base::Closure& closure) { |
- DCHECK(task_runner_->BelongsToCurrentThread()); |
- |
- avcodec_flush_buffers(codec_context_.get()); |
- state_ = kNormal; |
- task_runner_->PostTask(FROM_HERE, closure); |
-} |
- |
-void FFmpegVideoDecoder::Stop() { |
- DCHECK(task_runner_->BelongsToCurrentThread()); |
- |
- if (state_ == kUninitialized) |
+ base::ResetAndReturn(&decode_cb_).Run(kNotEnoughData, NULL); |
return; |
+ } |
- ReleaseFFmpegResources(); |
- state_ = kUninitialized; |
+ base::ResetAndReturn(&decode_cb_).Run(kOk, video_frame); |
} |
-FFmpegVideoDecoder::~FFmpegVideoDecoder() { |
- DCHECK_EQ(kUninitialized, state_); |
- DCHECK(!codec_context_); |
- DCHECK(!av_frame_); |
-} |
- |
bool FFmpegVideoDecoder::FFmpegDecode( |
const scoped_refptr<DecoderBuffer>& buffer, |
- bool* has_produced_frame) { |
- DCHECK(!*has_produced_frame); |
+ scoped_refptr<VideoFrame>* video_frame) { |
+ DCHECK(video_frame); |
// Create a packet for input data. |
// Due to FFmpeg API changes we no longer have const read-only pointers. |
@@ -288,6 +311,7 @@ |
// Log the problem if we can't decode a video frame and exit early. |
if (result < 0) { |
LOG(ERROR) << "Error decoding video: " << buffer->AsHumanReadableString(); |
+ *video_frame = NULL; |
return false; |
} |
@@ -301,6 +325,7 @@ |
// 1) Decoder was recently initialized/flushed |
// 2) End of stream was reached and all internal frames have been output |
if (frame_decoded == 0) { |
+ *video_frame = NULL; |
return true; |
} |
@@ -311,16 +336,16 @@ |
!av_frame_->data[VideoFrame::kUPlane] || |
!av_frame_->data[VideoFrame::kVPlane]) { |
LOG(ERROR) << "Video frame was produced yet has invalid frame data."; |
+ *video_frame = NULL; |
av_frame_unref(av_frame_.get()); |
return false; |
} |
- scoped_refptr<VideoFrame> frame = |
+ *video_frame = |
reinterpret_cast<VideoFrame*>(av_buffer_get_opaque(av_frame_->buf[0])); |
- frame->set_timestamp( |
+ |
+ (*video_frame)->set_timestamp( |
base::TimeDelta::FromMicroseconds(av_frame_->reordered_opaque)); |
- *has_produced_frame = true; |
- output_cb_.Run(frame); |
av_frame_unref(av_frame_.get()); |
return true; |