OLD | NEW |
---|---|
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/filters/ffmpeg_video_decoder.h" | 5 #include "media/filters/ffmpeg_video_decoder.h" |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/callback_helpers.h" | 8 #include "base/callback_helpers.h" |
9 #include "base/command_line.h" | 9 #include "base/command_line.h" |
10 #include "base/message_loop.h" | 10 #include "base/message_loop.h" |
(...skipping 44 matching lines...) | |
55 : message_loop_factory_cb_(message_loop_cb), | 55 : message_loop_factory_cb_(message_loop_cb), |
56 message_loop_(NULL), | 56 message_loop_(NULL), |
57 state_(kUninitialized), | 57 state_(kUninitialized), |
58 codec_context_(NULL), | 58 codec_context_(NULL), |
59 av_frame_(NULL), | 59 av_frame_(NULL), |
60 frame_rate_numerator_(0), | 60 frame_rate_numerator_(0), |
61 frame_rate_denominator_(0), | 61 frame_rate_denominator_(0), |
62 decryptor_(NULL) { | 62 decryptor_(NULL) { |
63 } | 63 } |
64 | 64 |
65 int FFmpegVideoDecoder::GetVideoBuffer(AVFrame* frame) { | |
66 VideoFrame::Format format = PixelFormatToVideoFormat(codec_context_->pix_fmt); | |
67 if (format == VideoFrame::INVALID) | |
68 return AVERROR(EINVAL); | |
69 DCHECK(format == VideoFrame::YV12 || format == VideoFrame::YV16); | |
70 | |
71 int width = codec_context_->width; | |
72 int height = codec_context_->height; | |
73 int ret; | |
74 if ((ret = av_image_check_size(width, height, 0, NULL)) < 0) | |
75 return ret; | |
76 | |
77 scoped_refptr<VideoFrame> video_frame = | |
78 VideoFrame::CreateFrame(format, width, height, | |
79 kNoTimestamp(), kNoTimestamp()); | |
80 | |
81 for (int i = 0; i < 3; i++) { | |
82 frame->base[i] = video_frame->data(i); | |
83 frame->data[i] = video_frame->data(i); | |
84 frame->linesize[i] = video_frame->stride(i); | |
85 } | |
86 | |
87 frame->opaque = video_frame.release(); | |
88 frame->type = FF_BUFFER_TYPE_USER; | |
89 frame->pkt_pts = codec_context_->pkt ? codec_context_->pkt->pts : | |
90 AV_NOPTS_VALUE; | |
91 frame->width = codec_context_->width; | |
92 frame->height = codec_context_->height; | |
93 frame->format = codec_context_->pix_fmt; | |
94 | |
95 return 0; | |
96 } | |
97 | |
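For reference, a small standalone example of the three-plane layout that the per-plane data/linesize loop in GetVideoBuffer() relies on for YV12 (the dimensions and the lack of stride alignment are assumptions for illustration; the real VideoFrame may pad its strides):

#include <cstdio>

int main() {
  const int width = 320, height = 240;                     // assumed coded size
  const int y_stride = width,      y_rows = height;        // full-resolution Y plane
  const int uv_stride = width / 2, uv_rows = height / 2;   // YV12: quarter-size U and V planes
  std::printf("Y: %d bytes (%d x %d)\n", y_stride * y_rows, y_stride, y_rows);
  std::printf("U: %d bytes (%d x %d)\n", uv_stride * uv_rows, uv_stride, uv_rows);
  std::printf("V: %d bytes (%d x %d)\n", uv_stride * uv_rows, uv_stride, uv_rows);
  return 0;
}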
98 static int GetVideoBufferImpl(AVCodecContext* s, AVFrame* frame) { | |
99 FFmpegVideoDecoder* vd = static_cast<FFmpegVideoDecoder*>(s->opaque); | |
100 return vd->GetVideoBuffer(frame); | |
101 } | |
102 | |
103 static void ReleaseVideoBufferImpl(AVCodecContext* s, AVFrame* frame) { | |
104 // We're releasing the reference to the buffer allocated in |
105 // GetVideoBuffer(), so the explicit Release() below is |
106 // intentional. |
107 scoped_refptr<VideoFrame> video_frame = | |
108 static_cast<VideoFrame*>(frame->opaque); | |
109 video_frame->Release(); | |
110 | |
111 // The FFmpeg API expects us to zero the data pointers in | |
112 // this callback. |
113 memset(frame->data, 0, sizeof(frame->data)); | |
114 frame->opaque = NULL; | |
115 } | |
116 | |
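A minimal sketch of the net reference-count handoff between GetVideoBuffer() and ReleaseVideoBufferImpl(), using a toy refcounted type in place of media::VideoFrame and a plain void* in place of AVFrame::opaque (all names below are illustrative, not Chromium or FFmpeg API):

#include <cstdio>

struct ToyFrame {                       // stand-in for a ref-counted VideoFrame
  int ref_count = 1;                    // the creator starts with one reference
  void Release() {
    if (--ref_count == 0) { std::puts("frame destroyed"); delete this; }
  }
};

int main() {
  // GetVideoBuffer(): the creator's reference is handed over to the opaque
  // pointer (the role video_frame.release() plays above).
  void* opaque = new ToyFrame();

  // ReleaseVideoBufferImpl(): drop the reference that traveled via |opaque|;
  // in the real code this is the net effect of the explicit Release().
  static_cast<ToyFrame*>(opaque)->Release();
  opaque = nullptr;
  return 0;
}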
65 void FFmpegVideoDecoder::Initialize(const scoped_refptr<DemuxerStream>& stream, | 117 void FFmpegVideoDecoder::Initialize(const scoped_refptr<DemuxerStream>& stream, |
66 const PipelineStatusCB& status_cb, | 118 const PipelineStatusCB& status_cb, |
67 const StatisticsCB& statistics_cb) { | 119 const StatisticsCB& statistics_cb) { |
68 // Ensure FFmpeg has been initialized | 120 // Ensure FFmpeg has been initialized |
69 FFmpegGlue::GetInstance(); | 121 FFmpegGlue::GetInstance(); |
70 | 122 |
71 if (!message_loop_) { | 123 if (!message_loop_) { |
72 message_loop_ = message_loop_factory_cb_.Run(); | 124 message_loop_ = message_loop_factory_cb_.Run(); |
73 message_loop_factory_cb_.Reset(); | 125 message_loop_factory_cb_.Reset(); |
74 | 126 |
(...skipping 26 matching lines...) | |
101 | 153 |
102 // Initialize AVCodecContext structure. | 154 // Initialize AVCodecContext structure. |
103 codec_context_ = avcodec_alloc_context3(NULL); | 155 codec_context_ = avcodec_alloc_context3(NULL); |
104 VideoDecoderConfigToAVCodecContext(config, codec_context_); | 156 VideoDecoderConfigToAVCodecContext(config, codec_context_); |
105 | 157 |
106 // Enable motion vector search (potentially slow), strong deblocking filter | 158 // Enable motion vector search (potentially slow), strong deblocking filter |
107 // for damaged macroblocks, and set our error detection sensitivity. | 159 // for damaged macroblocks, and set our error detection sensitivity. |
108 codec_context_->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK; | 160 codec_context_->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK; |
109 codec_context_->err_recognition = AV_EF_CAREFUL; | 161 codec_context_->err_recognition = AV_EF_CAREFUL; |
110 codec_context_->thread_count = GetThreadCount(codec_context_->codec_id); | 162 codec_context_->thread_count = GetThreadCount(codec_context_->codec_id); |
163 codec_context_->opaque = this; | |
164 codec_context_->flags |= CODEC_FLAG_EMU_EDGE; | |
165 codec_context_->get_buffer = GetVideoBufferImpl; | |
166 codec_context_->release_buffer = ReleaseVideoBufferImpl; | |
111 | 167 |
112 AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id); | 168 AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id); |
113 if (!codec) { | 169 if (!codec) { |
114 status_cb.Run(PIPELINE_ERROR_DECODE); | 170 status_cb.Run(PIPELINE_ERROR_DECODE); |
115 return; | 171 return; |
116 } | 172 } |
117 | 173 |
118 if (avcodec_open2(codec_context_, codec, NULL) < 0) { | 174 if (avcodec_open2(codec_context_, codec, NULL) < 0) { |
119 status_cb.Run(PIPELINE_ERROR_DECODE); | 175 status_cb.Run(PIPELINE_ERROR_DECODE); |
120 return; | 176 return; |
(...skipping 241 matching lines...) | |
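The codec-context wiring added in Initialize() above (opaque plus get_buffer/release_buffer) follows the usual C-callback trampoline pattern. A self-contained sketch of that pattern, with toy types standing in for AVCodecContext and the decoder (nothing here is the FFmpeg API itself):

#include <cstdio>

struct FakeCodecContext {                  // stand-in for AVCodecContext
  void* opaque = nullptr;                  // back-pointer to the C++ object
  int (*get_buffer)(FakeCodecContext*) = nullptr;
};

class Decoder {
 public:
  void Attach(FakeCodecContext* ctx) {
    ctx->opaque = this;                    // so the static thunk can find us
    ctx->get_buffer = &Decoder::GetBufferThunk;
  }
  int GetVideoBuffer() {                   // the real work happens on the object
    std::puts("GetVideoBuffer called on the owning Decoder");
    return 0;
  }
 private:
  static int GetBufferThunk(FakeCodecContext* ctx) {
    return static_cast<Decoder*>(ctx->opaque)->GetVideoBuffer();
  }
};

int main() {
  FakeCodecContext ctx;
  Decoder decoder;
  decoder.Attach(&ctx);
  return ctx.get_buffer(&ctx);             // the library would make this call
}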
362 // The decoder is in a bad state and not decoding correctly. | 418 // The decoder is in a bad state and not decoding correctly. |
363 // Checking for NULL avoids a crash in CopyPlane(). | 419 // Checking for NULL avoids a crash in CopyPlane(). |
364 if (!av_frame_->data[VideoFrame::kYPlane] || | 420 if (!av_frame_->data[VideoFrame::kYPlane] || |
365 !av_frame_->data[VideoFrame::kUPlane] || | 421 !av_frame_->data[VideoFrame::kUPlane] || |
366 !av_frame_->data[VideoFrame::kVPlane]) { | 422 !av_frame_->data[VideoFrame::kVPlane]) { |
367 LOG(ERROR) << "Video frame was produced yet has invalid frame data."; | 423 LOG(ERROR) << "Video frame was produced yet has invalid frame data."; |
368 *video_frame = NULL; | 424 *video_frame = NULL; |
369 return false; | 425 return false; |
370 } | 426 } |
371 | 427 |
372 // We've got a frame! Make sure we have a place to store it. | 428 if (!av_frame_->opaque) { |
373 *video_frame = AllocateVideoFrame(); | 429 LOG(ERROR) << "VideoFrame object associated with frame data not set."; |
374 if (!(*video_frame)) { | |
375 LOG(ERROR) << "Failed to allocate video frame"; | |
376 return false; | 430 return false; |
377 } | 431 } |
432 *video_frame = static_cast<VideoFrame *>(av_frame_->opaque); | |
scherkus (not reviewing) 2012/06/18 20:27:38:
pointer w/ type: VideoFrame* | |
378 | 433 |
379 // Determine timestamp and calculate the duration based on the repeat picture | 434 // Determine timestamp and calculate the duration based on the repeat picture |
380 // count. According to FFmpeg docs, the total duration can be calculated as | 435 // count. According to FFmpeg docs, the total duration can be calculated as |
381 // follows: | 436 // follows: |
382 // fps = 1 / time_base | 437 // fps = 1 / time_base |
383 // | 438 // |
384 // duration = (1 / fps) + (repeat_pict) / (2 * fps) | 439 // duration = (1 / fps) + (repeat_pict) / (2 * fps) |
385 // = (2 + repeat_pict) / (2 * fps) | 440 // = (2 + repeat_pict) / (2 * fps) |
386 // = (2 + repeat_pict) / (2 * (1 / time_base)) | 441 // = (2 + repeat_pict) / (2 * (1 / time_base)) |
387 DCHECK_LE(av_frame_->repeat_pict, 2); // Sanity check. | 442 DCHECK_LE(av_frame_->repeat_pict, 2); // Sanity check. |
388 AVRational doubled_time_base; | 443 AVRational doubled_time_base; |
389 doubled_time_base.num = frame_rate_denominator_; | 444 doubled_time_base.num = frame_rate_denominator_; |
390 doubled_time_base.den = frame_rate_numerator_ * 2; | 445 doubled_time_base.den = frame_rate_numerator_ * 2; |
391 | 446 |
392 (*video_frame)->SetTimestamp( | 447 (*video_frame)->SetTimestamp( |
393 base::TimeDelta::FromMicroseconds(av_frame_->reordered_opaque)); | 448 base::TimeDelta::FromMicroseconds(av_frame_->reordered_opaque)); |
394 (*video_frame)->SetDuration( | 449 (*video_frame)->SetDuration( |
395 ConvertFromTimeBase(doubled_time_base, 2 + av_frame_->repeat_pict)); | 450 ConvertFromTimeBase(doubled_time_base, 2 + av_frame_->repeat_pict)); |
396 | 451 |
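To make the duration formula above concrete, a small standalone check (the 30000/1001 frame rate is an assumed example, not a value taken from the code):

#include <cstdio>

int main() {
  // duration = (2 + repeat_pict) / (2 * fps), expressed with the doubled time
  // base: num = frame_rate_denominator, den = 2 * frame_rate_numerator.
  const int frame_rate_numerator = 30000;    // ~29.97 fps
  const int frame_rate_denominator = 1001;
  for (int repeat_pict = 0; repeat_pict <= 2; ++repeat_pict) {
    const double duration_us =
        (2 + repeat_pict) * 1e6 * frame_rate_denominator /
        (2.0 * frame_rate_numerator);
    // repeat_pict = 0 -> ~33366.7 us, 1 -> ~50050.0 us, 2 -> ~66733.3 us
    std::printf("repeat_pict=%d duration=%.1f us\n", repeat_pict, duration_us);
  }
  return 0;
}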
397 // Copy the frame data since FFmpeg reuses internal buffers for AVFrame | |
398 // output, meaning the data is only valid until the next | |
399 // avcodec_decode_video() call. | |
400 int y_rows = codec_context_->height; | |
401 int uv_rows = codec_context_->height; | |
402 if (codec_context_->pix_fmt == PIX_FMT_YUV420P) { | |
403 uv_rows /= 2; | |
404 } | |
405 | |
406 CopyYPlane(av_frame_->data[0], av_frame_->linesize[0], y_rows, *video_frame); | |
407 CopyUPlane(av_frame_->data[1], av_frame_->linesize[1], uv_rows, *video_frame); | |
408 CopyVPlane(av_frame_->data[2], av_frame_->linesize[2], uv_rows, *video_frame); | |
409 | |
410 return true; | 452 return true; |
411 } | 453 } |
412 | 454 |
413 void FFmpegVideoDecoder::DeliverFrame( | 455 void FFmpegVideoDecoder::DeliverFrame( |
414 const scoped_refptr<VideoFrame>& video_frame) { | 456 const scoped_refptr<VideoFrame>& video_frame) { |
415 // Reset the callback before running to protect against reentrancy. | 457 // Reset the callback before running to protect against reentrancy. |
416 base::ResetAndReturn(&read_cb_).Run(kOk, video_frame); | 458 base::ResetAndReturn(&read_cb_).Run(kOk, video_frame); |
417 } | 459 } |
418 | 460 |
419 void FFmpegVideoDecoder::ReleaseFFmpegResources() { | 461 void FFmpegVideoDecoder::ReleaseFFmpegResources() { |
420 if (codec_context_) { | 462 if (codec_context_) { |
421 av_free(codec_context_->extradata); | 463 av_free(codec_context_->extradata); |
422 avcodec_close(codec_context_); | 464 avcodec_close(codec_context_); |
423 av_free(codec_context_); | 465 av_free(codec_context_); |
424 codec_context_ = NULL; | 466 codec_context_ = NULL; |
425 } | 467 } |
426 if (av_frame_) { | 468 if (av_frame_) { |
427 av_free(av_frame_); | 469 av_free(av_frame_); |
428 av_frame_ = NULL; | 470 av_frame_ = NULL; |
429 } | 471 } |
430 } | 472 } |
431 | 473 |
432 scoped_refptr<VideoFrame> FFmpegVideoDecoder::AllocateVideoFrame() { | |
433 VideoFrame::Format format = PixelFormatToVideoFormat(codec_context_->pix_fmt); | |
434 size_t width = codec_context_->width; | |
435 size_t height = codec_context_->height; | |
436 | |
437 return VideoFrame::CreateFrame(format, width, height, | |
438 kNoTimestamp(), kNoTimestamp()); | |
439 } | |
440 | |
441 } // namespace media | 474 } // namespace media |