Index: media/filters/ffmpeg_video_decoder.cc
diff --git a/media/filters/ffmpeg_video_decoder.cc b/media/filters/ffmpeg_video_decoder.cc
index bc2346ddf8843526fde8ae980e847488e7ba0b13..5a8851572e7019f2765d6e7b13595c37faa74d89 100644
--- a/media/filters/ffmpeg_video_decoder.cc
+++ b/media/filters/ffmpeg_video_decoder.cc
@@ -135,10 +135,11 @@ static void ReleaseVideoBufferImpl(AVCodecContext* s, AVFrame* frame) {
 void FFmpegVideoDecoder::Initialize(const VideoDecoderConfig& config,
                                     bool low_delay,
-                                    const PipelineStatusCB& status_cb) {
+                                    const PipelineStatusCB& status_cb,
+                                    const OutputCB& output_cb) {
   DCHECK(task_runner_->BelongsToCurrentThread());
-  DCHECK(decode_cb_.is_null());
   DCHECK(!config.is_encrypted());
+  DCHECK(!output_cb.is_null());
   FFmpegGlue::InitializeFFmpeg();
@@ -150,6 +151,8 @@ void FFmpegVideoDecoder::Initialize(const VideoDecoderConfig& config,
     return;
   }
+  output_cb_ = BindToCurrentLoop(output_cb);
+
   // Success!
   state_ = kNormal;
   initialize_cb.Run(PIPELINE_OK);
@@ -158,58 +161,25 @@ void FFmpegVideoDecoder::Initialize(const VideoDecoderConfig& config,
 void FFmpegVideoDecoder::Decode(const scoped_refptr<DecoderBuffer>& buffer,
                                 const DecodeCB& decode_cb) {
   DCHECK(task_runner_->BelongsToCurrentThread());
+  DCHECK(buffer);
   DCHECK(!decode_cb.is_null());
   CHECK_NE(state_, kUninitialized);
-  CHECK(decode_cb_.is_null()) << "Overlapping decodes are not supported.";
-  decode_cb_ = BindToCurrentLoop(decode_cb);
+
+  DecodeCB decode_cb_bound = BindToCurrentLoop(decode_cb);
   if (state_ == kError) {
-    base::ResetAndReturn(&decode_cb_).Run(kDecodeError, NULL);
+    decode_cb_bound.Run(kDecodeError);
     return;
   }
   // Return empty frames if decoding has finished.
   if (state_ == kDecodeFinished) {
xhwang (2014/06/05 21:53:51): nit: Probably this should never happen (DecoderStr…
Sergey Ulanov (2014/06/06 22:49:41): Done.
Sergey Ulanov (2014/06/06 23:12:34): Actually DecoderStream may call this method after…
xhwang (2014/06/07 00:35:14): Can you add a TODO here?
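(For illustration only: a sketch of how this branch might read once the requested TODO is added. The TODO wording is assumed here and is not taken from a later patch set.)

  // Return empty frames if decoding has finished.
  if (state_ == kDecodeFinished) {
    // TODO: DecoderStream may still call Decode() after the end-of-stream
    // frame has been returned, so this branch cannot simply be removed.
    output_cb_.Run(VideoFrame::CreateEOSFrame());
    decode_cb_bound.Run(kOk);
    return;
  }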
-    base::ResetAndReturn(&decode_cb_).Run(kOk, VideoFrame::CreateEOSFrame());
+    output_cb_.Run(VideoFrame::CreateEOSFrame());
+    decode_cb_bound.Run(kOk);
     return;
   }
-  DecodeBuffer(buffer);
-}
-
-void FFmpegVideoDecoder::Reset(const base::Closure& closure) {
-  DCHECK(task_runner_->BelongsToCurrentThread());
-  DCHECK(decode_cb_.is_null());
-
-  avcodec_flush_buffers(codec_context_.get());
-  state_ = kNormal;
-  task_runner_->PostTask(FROM_HERE, closure);
-}
-
-void FFmpegVideoDecoder::Stop() {
-  DCHECK(task_runner_->BelongsToCurrentThread());
-
-  if (state_ == kUninitialized)
-    return;
-
-  ReleaseFFmpegResources();
-  state_ = kUninitialized;
-}
-
-FFmpegVideoDecoder::~FFmpegVideoDecoder() {
-  DCHECK_EQ(kUninitialized, state_);
-  DCHECK(!codec_context_);
-  DCHECK(!av_frame_);
-}
-
-void FFmpegVideoDecoder::DecodeBuffer(
-    const scoped_refptr<DecoderBuffer>& buffer) {
-  DCHECK(task_runner_->BelongsToCurrentThread());
-  DCHECK_NE(state_, kUninitialized);
-  DCHECK_NE(state_, kDecodeFinished);
-  DCHECK_NE(state_, kError);
-  DCHECK(!decode_cb_.is_null());
-  DCHECK(buffer);
+  DCHECK_EQ(state_, kNormal);
   // During decode, because reads are issued asynchronously, it is possible to
   // receive multiple end of stream buffers since each decode is acked. When the
@@ -228,49 +198,61 @@ void FFmpegVideoDecoder::DecodeBuffer(
   //
   // These are the possible state transitions.
   //
-  // kNormal -> kFlushCodec:
-  //     When buffer->end_of_stream() is first true.
+  // kNormal -> kDecodeFinished:
+  //     When EOS buffer is received.
xhwang (2014/06/05 21:53:50): When EOS buffer is received and the codec has been…
Sergey Ulanov (2014/06/06 22:49:41): Done.
   // kNormal -> kError:
   //     A decoding error occurs and decoding needs to stop.
-  // kFlushCodec -> kDecodeFinished:
-  //     When avcodec_decode_video2() returns 0 data.
-  // kFlushCodec -> kError:
-  //     When avcodec_decode_video2() errors out.
   // (any state) -> kNormal:
   //     Any time Reset() is called.
-  // Transition to kFlushCodec on the first end of stream buffer.
-  if (state_ == kNormal && buffer->end_of_stream()) {
-    state_ = kFlushCodec;
-  }
+  bool repeat;
+  do {
+    bool produced_frame = false;
+    if (!FFmpegDecode(buffer, &produced_frame)) {
xhwang (2014/06/05 21:53:50): |produced_frame| could be read as a noun. How abou…
Sergey Ulanov (2014/06/06 22:49:41): Done.
+      state_ = kError;
+      decode_cb_bound.Run(kDecodeError);
+      return;
+    }
+    // Repeat to flush the decoder after receiving EOS buffer.
+    repeat = buffer->end_of_stream() && produced_frame;
+  } while (repeat);
xhwang (2014/06/05 21:53:50): nit: how about just while (buffer->end_of_stream(…
Sergey Ulanov (2014/06/06 22:49:41): Done.
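(For illustration only: a sketch of how the loop above might read with the do/while folded into a single condition, as the truncated nit appears to suggest, and with a possible rename responding to the earlier naming comment. Not taken from a later patch set.)

// Sketch: drain the codec in one loop. For an end-of-stream buffer, keep
// calling FFmpegDecode() until it stops producing frames; for a regular
// buffer a single pass is enough.
bool frame_produced;
do {
  frame_produced = false;
  if (!FFmpegDecode(buffer, &frame_produced)) {
    state_ = kError;
    decode_cb_bound.Run(kDecodeError);
    return;
  }
} while (buffer->end_of_stream() && frame_produced);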
-  scoped_refptr<VideoFrame> video_frame;
-  if (!FFmpegDecode(buffer, &video_frame)) {
-    state_ = kError;
-    base::ResetAndReturn(&decode_cb_).Run(kDecodeError, NULL);
-    return;
+  if (buffer->end_of_stream()) {
+    output_cb_.Run(VideoFrame::CreateEOSFrame());
+    state_ = kDecodeFinished;
   }
-  if (!video_frame.get()) {
-    if (state_ == kFlushCodec) {
-      DCHECK(buffer->end_of_stream());
-      state_ = kDecodeFinished;
-      base::ResetAndReturn(&decode_cb_)
-          .Run(kOk, VideoFrame::CreateEOSFrame());
-      return;
-    }
+  decode_cb_bound.Run(kOk);
+}
+
+void FFmpegVideoDecoder::Reset(const base::Closure& closure) {
+  DCHECK(task_runner_->BelongsToCurrentThread());
+
+  avcodec_flush_buffers(codec_context_.get());
+  state_ = kNormal;
+  task_runner_->PostTask(FROM_HERE, closure);
+}
+
+void FFmpegVideoDecoder::Stop() {
+  DCHECK(task_runner_->BelongsToCurrentThread());
-  base::ResetAndReturn(&decode_cb_).Run(kNotEnoughData, NULL);
+  if (state_ == kUninitialized)
     return;
-  }
-  base::ResetAndReturn(&decode_cb_).Run(kOk, video_frame);
+  ReleaseFFmpegResources();
+  state_ = kUninitialized;
+}
+
+FFmpegVideoDecoder::~FFmpegVideoDecoder() {
+  DCHECK_EQ(kUninitialized, state_);
+  DCHECK(!codec_context_);
+  DCHECK(!av_frame_);
 }
 bool FFmpegVideoDecoder::FFmpegDecode(
     const scoped_refptr<DecoderBuffer>& buffer,
-    scoped_refptr<VideoFrame>* video_frame) {
-  DCHECK(video_frame);
+    bool* produced_frame) {
+  *produced_frame = false;
xhwang (2014/06/05 21:53:51): DCHECK(!*produced_frame);
Sergey Ulanov (2014/06/06 22:49:41): Done.
   // Reset frame to default values.
   avcodec_get_frame_defaults(av_frame_.get());
@@ -302,7 +284,6 @@ bool FFmpegVideoDecoder::FFmpegDecode(
   // Log the problem if we can't decode a video frame and exit early.
   if (result < 0) {
     LOG(ERROR) << "Error decoding video: " << buffer->AsHumanReadableString();
-    *video_frame = NULL;
     return false;
   }
@@ -311,7 +292,6 @@ bool FFmpegVideoDecoder::FFmpegDecode(
   // 1) Decoder was recently initialized/flushed
   // 2) End of stream was reached and all internal frames have been output
   if (frame_decoded == 0) {
-    *video_frame = NULL;
     return true;
   }
@@ -322,7 +302,6 @@ bool FFmpegVideoDecoder::FFmpegDecode(
       !av_frame_->data[VideoFrame::kUPlane] ||
       !av_frame_->data[VideoFrame::kVPlane]) {
     LOG(ERROR) << "Video frame was produced yet has invalid frame data.";
-    *video_frame = NULL;
     return false;
   }
@@ -330,10 +309,12 @@ bool FFmpegVideoDecoder::FFmpegDecode(
     LOG(ERROR) << "VideoFrame object associated with frame data not set.";
     return false;
   }
-  *video_frame = static_cast<VideoFrame*>(av_frame_->opaque);
-  (*video_frame)->set_timestamp(
+  scoped_refptr<VideoFrame> frame = static_cast<VideoFrame*>(av_frame_->opaque);
+  frame->set_timestamp(
       base::TimeDelta::FromMicroseconds(av_frame_->reordered_opaque));
+  *produced_frame = true;
+  output_cb_.Run(frame);
   return true;
 }
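For context, a minimal caller-side sketch of the contract this patch establishes: decoded frames (including the end-of-stream frame) arrive through the OutputCB registered at Initialize(), while the per-Decode() callback carries only a status. The helper functions and wiring below are hypothetical and assume the VideoDecoder callback typedefs of this period (PipelineStatusCB, OutputCB, DecodeCB); all calls must be made on the decoder's task runner.

#include "base/bind.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
#include "media/filters/ffmpeg_video_decoder.h"

// Hypothetical client callbacks.
void OnInitDone(media::PipelineStatus status) {}
void OnFrameReady(const scoped_refptr<media::VideoFrame>& frame) {}
void OnDecodeDone(media::VideoDecoder::Status status) {}

void RunDecoder(media::FFmpegVideoDecoder* decoder,
                const media::VideoDecoderConfig& config,
                const scoped_refptr<media::DecoderBuffer>& buffer) {
  // Frames are pushed to OnFrameReady() via the OutputCB; OnDecodeDone() only
  // learns whether the decode succeeded.
  decoder->Initialize(config,
                      false /* low_delay */,
                      base::Bind(&OnInitDone),
                      base::Bind(&OnFrameReady));
  // In real code, wait for OnInitDone() to report PIPELINE_OK before decoding.
  decoder->Decode(buffer, base::Bind(&OnDecodeDone));
}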