Chromium Code Reviews

Unified diff: media/filters/ffmpeg_video_decoder.cc

Issue 331863004: Revert 276344 "Add callback in VideoDecoder and AudioDecoder to ..." (Closed) Base URL: svn://svn.chromium.org/chrome/branches/2049/src/
Patch Set: Created 6 years, 6 months ago
--- media/filters/ffmpeg_video_decoder.cc (old)
+++ media/filters/ffmpeg_video_decoder.cc (new)
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "media/filters/ffmpeg_video_decoder.h"

 #include <algorithm>
 #include <string>

 #include "base/bind.h"
(...skipping 133 matching lines...)
       av_buffer_create(frame->data[0],
                        VideoFrame::AllocationSize(format, coded_size),
                        ReleaseVideoBufferImpl,
                        opaque,
                        0);
   return 0;
 }

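Reviewer note: the context above hands a VideoFrame-backed allocation to FFmpeg via av_buffer_create(), attaching a release callback plus an opaque pointer so the memory is freed only when FFmpeg drops its last reference. The standalone sketch below (not part of this CL; the buffer, label, and callback name are invented) shows the same av_buffer_create()/av_buffer_unref() pattern on a plain heap allocation.

// Illustrative sketch only: wraps externally allocated memory in an
// AVBufferRef, mirroring the GetVideoBuffer()/ReleaseVideoBufferImpl pairing
// above. Requires FFmpeg's libavutil headers; everything else is hypothetical.
extern "C" {
#include <libavutil/buffer.h>
}

#include <cstdio>
#include <cstdlib>

// FFmpeg calls this once the last AVBufferRef to the memory is unreferenced.
// ffmpeg_video_decoder.cc uses the opaque pointer to drop a VideoFrame ref;
// here it just carries a label.
static void ReleaseExternalBuffer(void* opaque, uint8_t* data) {
  std::printf("released buffer: %s\n", static_cast<const char*>(opaque));
  std::free(data);
}

int main() {
  const int kSize = 64 * 64;  // Stand-in for VideoFrame::AllocationSize().
  uint8_t* data = static_cast<uint8_t*>(std::malloc(kSize));
  AVBufferRef* buf = av_buffer_create(data, kSize, ReleaseExternalBuffer,
                                      const_cast<char*>("frame-0"),
                                      /*flags=*/0);
  if (!buf)
    return 1;
  av_buffer_unref(&buf);  // Triggers ReleaseExternalBuffer().
  return 0;
}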
 void FFmpegVideoDecoder::Initialize(const VideoDecoderConfig& config,
                                     bool low_delay,
-                                    const PipelineStatusCB& status_cb,
-                                    const OutputCB& output_cb) {
+                                    const PipelineStatusCB& status_cb) {
   DCHECK(task_runner_->BelongsToCurrentThread());
+  DCHECK(decode_cb_.is_null());
   DCHECK(!config.is_encrypted());
-  DCHECK(!output_cb.is_null());

   FFmpegGlue::InitializeFFmpeg();

   config_ = config;
   PipelineStatusCB initialize_cb = BindToCurrentLoop(status_cb);

   if (!config.IsValidConfig() || !ConfigureDecoder(low_delay)) {
     initialize_cb.Run(DECODER_ERROR_NOT_SUPPORTED);
     return;
   }

-  output_cb_ = BindToCurrentLoop(output_cb);
-
   // Success!
   state_ = kNormal;
   initialize_cb.Run(PIPELINE_OK);
 }

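Reviewer note: this hunk drops the OutputCB parameter because the revert returns to delivering frames through the decode callback itself. A rough sketch of the two callback shapes implied by how they are invoked in this diff, using std::function stand-ins for base::Callback (the alias names are approximations, not copied from the media/base headers):

// Approximate callback shapes, for illustration only.
#include <functional>

struct VideoFrame;  // Stand-in for media::VideoFrame.
enum Status { kOk, kNotEnoughData, kDecodeError };

// Before the revert (left side): Decode() only reports a status, and decoded
// frames are pushed separately through an output callback given to
// Initialize().
using StatusOnlyDecodeCB = std::function<void(Status)>;
using OutputCB = std::function<void(VideoFrame*)>;

// After the revert (right side): each Decode() hands back at most one frame
// through its completion callback, and kNotEnoughData asks the caller to feed
// more input before expecting a frame.
using DecodeCB = std::function<void(Status, VideoFrame*)>;

int main() {
  DecodeCB decode_cb = [](Status status, VideoFrame* frame) {
    // With the reverted API, the frame (possibly null) arrives here directly.
  };
  decode_cb(kOk, nullptr);
  return 0;
}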
 void FFmpegVideoDecoder::Decode(const scoped_refptr<DecoderBuffer>& buffer,
                                 const DecodeCB& decode_cb) {
   DCHECK(task_runner_->BelongsToCurrentThread());
-  DCHECK(buffer);
   DCHECK(!decode_cb.is_null());
   CHECK_NE(state_, kUninitialized);
-
-  DecodeCB decode_cb_bound = BindToCurrentLoop(decode_cb);
+  CHECK(decode_cb_.is_null()) << "Overlapping decodes are not supported.";
+  decode_cb_ = BindToCurrentLoop(decode_cb);

   if (state_ == kError) {
-    decode_cb_bound.Run(kDecodeError);
+    base::ResetAndReturn(&decode_cb_).Run(kDecodeError, NULL);
     return;
   }

+  // Return empty frames if decoding has finished.
   if (state_ == kDecodeFinished) {
-    output_cb_.Run(VideoFrame::CreateEOSFrame());
-    decode_cb_bound.Run(kOk);
+    base::ResetAndReturn(&decode_cb_).Run(kOk, VideoFrame::CreateEOSFrame());
     return;
   }

-  DCHECK_EQ(state_, kNormal);
+  DecodeBuffer(buffer);
+}
+
+void FFmpegVideoDecoder::Reset(const base::Closure& closure) {
+  DCHECK(task_runner_->BelongsToCurrentThread());
+  DCHECK(decode_cb_.is_null());
+
+  avcodec_flush_buffers(codec_context_.get());
+  state_ = kNormal;
+  task_runner_->PostTask(FROM_HERE, closure);
+}
+
+void FFmpegVideoDecoder::Stop() {
+  DCHECK(task_runner_->BelongsToCurrentThread());
+
+  if (state_ == kUninitialized)
+    return;
+
+  ReleaseFFmpegResources();
+  state_ = kUninitialized;
+}
+
+FFmpegVideoDecoder::~FFmpegVideoDecoder() {
+  DCHECK_EQ(kUninitialized, state_);
+  DCHECK(!codec_context_);
+  DCHECK(!av_frame_);
+}
+
+void FFmpegVideoDecoder::DecodeBuffer(
+    const scoped_refptr<DecoderBuffer>& buffer) {
+  DCHECK(task_runner_->BelongsToCurrentThread());
+  DCHECK_NE(state_, kUninitialized);
+  DCHECK_NE(state_, kDecodeFinished);
+  DCHECK_NE(state_, kError);
+  DCHECK(!decode_cb_.is_null());
+  DCHECK(buffer);

   // During decode, because reads are issued asynchronously, it is possible to
   // receive multiple end of stream buffers since each decode is acked. When the
   // first end of stream buffer is read, FFmpeg may still have frames queued
   // up in the decoder so we need to go through the decode loop until it stops
   // giving sensible data. After that, the decoder should output empty
   // frames. There are three states the decoder can be in:
   //
   //   kNormal: This is the starting state. Buffers are decoded. Decode errors
   //            are discarded.
+  //   kFlushCodec: There isn't any more input data. Call avcodec_decode_video2
+  //                until no more data is returned to flush out remaining
+  //                frames. The input buffer is ignored at this point.
   //   kDecodeFinished: All calls return empty frames.
   //   kError: Unexpected error happened.
   //
   // These are the possible state transitions.
   //
-  // kNormal -> kDecodeFinished:
-  //     When EOS buffer is received and the codec has been flushed.
+  // kNormal -> kFlushCodec:
+  //     When buffer->end_of_stream() is first true.
   // kNormal -> kError:
   //     A decoding error occurs and decoding needs to stop.
+  // kFlushCodec -> kDecodeFinished:
+  //     When avcodec_decode_video2() returns 0 data.
+  // kFlushCodec -> kError:
+  //     When avcodec_decode_video2() errors out.
   // (any state) -> kNormal:
   //     Any time Reset() is called.

-  bool has_produced_frame;
-  do {
-    has_produced_frame = false;
-    if (!FFmpegDecode(buffer, &has_produced_frame)) {
-      state_ = kError;
-      decode_cb_bound.Run(kDecodeError);
+  // Transition to kFlushCodec on the first end of stream buffer.
+  if (state_ == kNormal && buffer->end_of_stream()) {
+    state_ = kFlushCodec;
+  }
+
+  scoped_refptr<VideoFrame> video_frame;
+  if (!FFmpegDecode(buffer, &video_frame)) {
+    state_ = kError;
+    base::ResetAndReturn(&decode_cb_).Run(kDecodeError, NULL);
+    return;
+  }
+
+  if (!video_frame.get()) {
+    if (state_ == kFlushCodec) {
+      DCHECK(buffer->end_of_stream());
+      state_ = kDecodeFinished;
+      base::ResetAndReturn(&decode_cb_)
+          .Run(kOk, VideoFrame::CreateEOSFrame());
       return;
     }
-    // Repeat to flush the decoder after receiving EOS buffer.
-  } while (buffer->end_of_stream() && has_produced_frame);

-  if (buffer->end_of_stream()) {
-    output_cb_.Run(VideoFrame::CreateEOSFrame());
-    state_ = kDecodeFinished;
+    base::ResetAndReturn(&decode_cb_).Run(kNotEnoughData, NULL);
+    return;
   }

-  decode_cb_bound.Run(kOk);
-}
-
-void FFmpegVideoDecoder::Reset(const base::Closure& closure) {
-  DCHECK(task_runner_->BelongsToCurrentThread());
-
-  avcodec_flush_buffers(codec_context_.get());
-  state_ = kNormal;
-  task_runner_->PostTask(FROM_HERE, closure);
-}
-
-void FFmpegVideoDecoder::Stop() {
-  DCHECK(task_runner_->BelongsToCurrentThread());
-
-  if (state_ == kUninitialized)
-    return;
-
-  ReleaseFFmpegResources();
-  state_ = kUninitialized;
-}
-
-FFmpegVideoDecoder::~FFmpegVideoDecoder() {
-  DCHECK_EQ(kUninitialized, state_);
-  DCHECK(!codec_context_);
-  DCHECK(!av_frame_);
+  base::ResetAndReturn(&decode_cb_).Run(kOk, video_frame);
 }

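Reviewer note: the comment block above describes the small state machine that the reverted DecodeBuffer() drives. The toy below is not the CL's code; it is a self-contained model of the same kNormal -> kFlushCodec -> kDecodeFinished progression, showing why the caller keeps feeding end-of-stream buffers until the codec reports it has nothing left.

// Standalone toy model of the state machine in the comment above. The
// "decoder" just holds a fixed number of queued frames and drains one per
// flush call; all names and numbers here are invented for illustration.
#include <cstdio>

enum State { kNormal, kFlushCodec, kDecodeFinished, kError };

struct ToyDecoder {
  State state = kNormal;
  int queued_frames = 2;  // Frames still buffered inside the codec.

  // Returns true if a frame was produced for this call.
  bool Decode(bool end_of_stream) {
    if (state == kNormal && end_of_stream)
      state = kFlushCodec;
    if (state == kFlushCodec) {
      if (queued_frames > 0) {
        --queued_frames;        // The flush call drained one queued frame.
        return true;
      }
      state = kDecodeFinished;  // Codec returned no data: flushing is done.
      return false;
    }
    return state == kNormal;    // Normal decode produces a frame here.
  }
};

int main() {
  ToyDecoder decoder;
  decoder.Decode(false);  // Regular buffer while in kNormal.
  // The caller keeps sending end-of-stream buffers until the decoder reports
  // it is finished, mirroring "go through the decode loop until it stops
  // giving sensible data."
  int eos_calls = 0;
  while (decoder.state != kDecodeFinished) {
    decoder.Decode(true);
    ++eos_calls;
  }
  std::printf("end-of-stream buffers needed to flush: %d\n", eos_calls);
  return 0;
}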
 bool FFmpegVideoDecoder::FFmpegDecode(
     const scoped_refptr<DecoderBuffer>& buffer,
-    bool* has_produced_frame) {
-  DCHECK(!*has_produced_frame);
+    scoped_refptr<VideoFrame>* video_frame) {
+  DCHECK(video_frame);

   // Create a packet for input data.
   // Due to FFmpeg API changes we no longer have const read-only pointers.
   AVPacket packet;
   av_init_packet(&packet);
   if (buffer->end_of_stream()) {
     packet.data = NULL;
     packet.size = 0;
   } else {
     packet.data = const_cast<uint8*>(buffer->data());
     packet.size = buffer->data_size();

     // Let FFmpeg handle presentation timestamp reordering.
     codec_context_->reordered_opaque = buffer->timestamp().InMicroseconds();
   }

   int frame_decoded = 0;
   int result = avcodec_decode_video2(codec_context_.get(),
                                      av_frame_.get(),
                                      &frame_decoded,
                                      &packet);
   // Log the problem if we can't decode a video frame and exit early.
   if (result < 0) {
     LOG(ERROR) << "Error decoding video: " << buffer->AsHumanReadableString();
+    *video_frame = NULL;
     return false;
   }

   // FFmpeg says some codecs might have multiple frames per packet. Previous
   // discussions with rbultje@ indicate this shouldn't be true for the codecs
   // we use.
   DCHECK_EQ(result, packet.size);

   // If no frame was produced then signal that more data is required to
   // produce more frames. This can happen under two circumstances:
   //   1) Decoder was recently initialized/flushed
   //   2) End of stream was reached and all internal frames have been output
   if (frame_decoded == 0) {
+    *video_frame = NULL;
     return true;
   }

   // TODO(fbarchard): Work around for FFmpeg http://crbug.com/27675
   // The decoder is in a bad state and not decoding correctly.
   // Checking for NULL avoids a crash in CopyPlane().
   if (!av_frame_->data[VideoFrame::kYPlane] ||
       !av_frame_->data[VideoFrame::kUPlane] ||
       !av_frame_->data[VideoFrame::kVPlane]) {
     LOG(ERROR) << "Video frame was produced yet has invalid frame data.";
+    *video_frame = NULL;
     av_frame_unref(av_frame_.get());
     return false;
   }

-  scoped_refptr<VideoFrame> frame =
+  *video_frame =
       reinterpret_cast<VideoFrame*>(av_buffer_get_opaque(av_frame_->buf[0]));
-  frame->set_timestamp(
+
+  (*video_frame)->set_timestamp(
       base::TimeDelta::FromMicroseconds(av_frame_->reordered_opaque));
-  *has_produced_frame = true;
-  output_cb_.Run(frame);

   av_frame_unref(av_frame_.get());
   return true;
 }
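Reviewer note: the "Let FFmpeg handle presentation timestamp reordering" lines stash the input buffer's timestamp in codec_context_->reordered_opaque before each avcodec_decode_video2() call; FFmpeg carries that value along with the frame through its internal reordering and hands it back in av_frame_->reordered_opaque, so the output frame gets the timestamp of whichever input it actually came from. The toy below is not FFmpeg code (all names are invented); it only illustrates why a per-input tag that rides with the frame survives decoder delay, whereas a single "latest timestamp" variable would not.

// Toy illustration of the reordered_opaque idea: each input carries an opaque
// tag (its timestamp), the "decoder" delays output by one input, and the tag
// comes back out with the frame it entered with.
#include <cstdio>
#include <deque>
#include <utility>

struct ToyDelayingDecoder {
  // Pretend the codec delays every frame by one input (decoder latency).
  std::deque<std::pair<int /*frame_id*/, long long /*opaque_pts_us*/>> queue;

  // Returns true and fills |frame_id|/|pts_us| when a frame pops out.
  bool Decode(int input_id, long long input_pts_us,
              int* frame_id, long long* pts_us) {
    queue.emplace_back(input_id, input_pts_us);  // Tag rides with the frame.
    if (queue.size() < 2)
      return false;  // Decoder is still buffering; no frame produced yet.
    *frame_id = queue.front().first;
    *pts_us = queue.front().second;  // Tag set when *this* frame went in.
    queue.pop_front();
    return true;
  }
};

int main() {
  ToyDelayingDecoder decoder;
  const long long pts[] = {0, 33366, 66733};  // ~30 fps timestamps, in us.
  for (int i = 0; i < 3; ++i) {
    int frame_id;
    long long out_pts;
    // The frame produced while feeding input i corresponds to input i - 1,
    // yet it still reports its own timestamp, not pts[i].
    if (decoder.Decode(i, pts[i], &frame_id, &out_pts))
      std::printf("output frame %d has pts %lld us\n", frame_id, out_pts);
  }
  return 0;
}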

 void FFmpegVideoDecoder::ReleaseFFmpegResources() {
   codec_context_.reset();
   av_frame_.reset();
 }

(...skipping 19 matching lines...)
   if (!codec || avcodec_open2(codec_context_.get(), codec, NULL) < 0) {
     ReleaseFFmpegResources();
     return false;
   }

   av_frame_.reset(av_frame_alloc());
   return true;
 }

 }  // namespace media
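Reviewer note: the ConfigureDecoder()/ReleaseFFmpegResources() tail above follows the usual FFmpeg open/teardown pattern: look up a codec, open a context with avcodec_open2(), allocate one reusable AVFrame, and free everything on failure or shutdown. A standalone sketch of that pattern follows (assumes FFmpeg development headers; the codec choice and structure are illustrative, not Chromium's ConfigureDecoder()).

// Minimal open/teardown sketch mirroring ConfigureDecoder() and
// ReleaseFFmpegResources(). Error handling is trimmed for brevity.
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/frame.h>
}
#include <cstdio>

int main() {
  // Look up a decoder; H.264 is just an example choice.
  const AVCodec* codec = avcodec_find_decoder(AV_CODEC_ID_H264);
  if (!codec) {
    std::fprintf(stderr, "decoder not compiled in\n");
    return 1;
  }

  AVCodecContext* context = avcodec_alloc_context3(codec);
  if (!context)
    return 1;

  // Mirrors the "!codec || avcodec_open2(...) < 0" check in the diff: any
  // failure tears the context back down.
  if (avcodec_open2(context, codec, nullptr) < 0) {
    avcodec_free_context(&context);
    return 1;
  }

  // One reusable frame, like av_frame_.reset(av_frame_alloc()).
  AVFrame* frame = av_frame_alloc();

  // ... decode packets here ...

  // Teardown, equivalent to ReleaseFFmpegResources().
  av_frame_free(&frame);
  avcodec_free_context(&context);
  return 0;
}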
