Chromium Code Reviews

Side by Side Diff: media/filters/ffmpeg_video_decoder.cc

Issue 3014059: media: recycle buffers/direct rendering etc. (third patch) (Closed)
Patch Set: code review Created 10 years, 3 months ago
1 // Copyright (c) 2010 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2010 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/filters/ffmpeg_video_decoder.h" 5 #include "media/filters/ffmpeg_video_decoder.h"
6 6
7 #include <deque> 7 #include <deque>
8 8
9 #include "base/task.h" 9 #include "base/task.h"
10 #include "media/base/callback.h" 10 #include "media/base/callback.h"
11 #include "media/base/filters.h" 11 #include "media/base/filters.h"
12 #include "media/base/filter_host.h" 12 #include "media/base/filter_host.h"
13 #include "media/base/limits.h" 13 #include "media/base/limits.h"
14 #include "media/base/media_format.h" 14 #include "media/base/media_format.h"
15 #include "media/base/video_frame.h" 15 #include "media/base/video_frame.h"
16 #include "media/ffmpeg/ffmpeg_common.h" 16 #include "media/ffmpeg/ffmpeg_common.h"
17 #include "media/ffmpeg/ffmpeg_util.h" 17 #include "media/ffmpeg/ffmpeg_util.h"
18 #include "media/filters/ffmpeg_interfaces.h" 18 #include "media/filters/ffmpeg_interfaces.h"
19 #include "media/filters/ffmpeg_video_decode_engine.h" 19 #include "media/filters/ffmpeg_video_decode_engine.h"
20 #include "media/filters/video_decode_engine.h" 20 #include "media/filters/video_decode_engine.h"
21 21
22 namespace media { 22 namespace media {
23 23
24 FFmpegVideoDecoder::FFmpegVideoDecoder(VideoDecodeEngine* engine) 24 FFmpegVideoDecoder::FFmpegVideoDecoder(VideoDecodeEngine* engine)
25 : width_(0), 25 : width_(0),
26 height_(0), 26 height_(0),
27 time_base_(new AVRational()), 27 time_base_(new AVRational()),
28 state_(kUnInitialized), 28 state_(kUnInitialized),
29 decode_engine_(engine), 29 decode_engine_(engine) {
30 pending_reads_(0),
31 pending_requests_(0) {
32 memset(&info_, 0, sizeof(info_)); 30 memset(&info_, 0, sizeof(info_));
33 } 31 }
34 32
35 FFmpegVideoDecoder::~FFmpegVideoDecoder() { 33 FFmpegVideoDecoder::~FFmpegVideoDecoder() {
36 } 34 }
37 35
38 void FFmpegVideoDecoder::Initialize(DemuxerStream* demuxer_stream, 36 void FFmpegVideoDecoder::Initialize(DemuxerStream* demuxer_stream,
39 FilterCallback* callback) { 37 FilterCallback* callback) {
40 if (MessageLoop::current() != message_loop()) { 38 if (MessageLoop::current() != message_loop()) {
41 message_loop()->PostTask(FROM_HERE, 39 message_loop()->PostTask(FROM_HERE,
(...skipping 101 matching lines...)
143 } 141 }
144 142
145 void FFmpegVideoDecoder::OnUninitializeComplete() { 143 void FFmpegVideoDecoder::OnUninitializeComplete() {
146 DCHECK_EQ(MessageLoop::current(), message_loop()); 144 DCHECK_EQ(MessageLoop::current(), message_loop());
147 DCHECK(uninitialize_callback_.get()); 145 DCHECK(uninitialize_callback_.get());
148 146
149 AutoCallbackRunner done_runner(uninitialize_callback_.release()); 147 AutoCallbackRunner done_runner(uninitialize_callback_.release());
150 state_ = kStopped; 148 state_ = kStopped;
151 } 149 }
152 150
151 void FFmpegVideoDecoder::Pause(FilterCallback* callback) {
152 if (MessageLoop::current() != message_loop()) {
153 message_loop()->PostTask(FROM_HERE,
154 NewRunnableMethod(this,
155 &FFmpegVideoDecoder::Pause,
156 callback));
157 return;
158 }
159
160 AutoCallbackRunner done_runner(callback);
161 state_ = kPausing;
162 }
163
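
The new Pause() above follows the same "trampoline" idiom used by Initialize(), Flush(), and Seek() in this file: if the caller is not on the decoder's message loop, the method re-posts the identical call to that loop and returns immediately. The standalone C++ sketch below illustrates the idiom under that assumption; TaskRunner and Decoder are hypothetical stand-ins, not the MessageLoop/NewRunnableMethod API the patch actually uses.

// Illustrative sketch of the thread-hopping ("trampoline") idiom.
// TaskRunner and Decoder are hypothetical names, not Chromium classes.
#include <deque>
#include <functional>
#include <iostream>
#include <thread>

class TaskRunner {
 public:
  void Post(std::function<void()> task) { queue_.push_back(std::move(task)); }
  void RunAll() {
    owner_ = std::this_thread::get_id();  // This thread now owns the loop.
    while (!queue_.empty()) {
      std::function<void()> task = std::move(queue_.front());
      queue_.pop_front();
      task();
    }
  }
  bool BelongsToCurrentThread() const {
    return std::this_thread::get_id() == owner_;
  }
 private:
  std::deque<std::function<void()>> queue_;
  std::thread::id owner_;
};

class Decoder {
 public:
  explicit Decoder(TaskRunner* runner) : runner_(runner) {}
  void Pause(std::function<void()> done) {
    if (!runner_->BelongsToCurrentThread()) {
      // Wrong thread: re-post the same call to the decoder loop and return,
      // mirroring message_loop()->PostTask(FROM_HERE, NewRunnableMethod(...)).
      runner_->Post([this, done] { Pause(done); });
      return;
    }
    pausing_ = true;  // Stand-in for state_ = kPausing.
    done();           // Stand-in for AutoCallbackRunner firing the callback.
  }
 private:
  TaskRunner* runner_ = nullptr;
  bool pausing_ = false;
};

int main() {
  TaskRunner runner;
  Decoder decoder(&runner);
  decoder.Pause([] { std::cout << "pause completed on the decoder loop\n"; });
  runner.RunAll();  // The re-posted Pause() executes here, on the owning loop.
  return 0;
}
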
153 void FFmpegVideoDecoder::Flush(FilterCallback* callback) { 164 void FFmpegVideoDecoder::Flush(FilterCallback* callback) {
154 if (MessageLoop::current() != message_loop()) { 165 if (MessageLoop::current() != message_loop()) {
155 message_loop()->PostTask(FROM_HERE, 166 message_loop()->PostTask(FROM_HERE,
156 NewRunnableMethod(this, 167 NewRunnableMethod(this,
157 &FFmpegVideoDecoder::Flush, 168 &FFmpegVideoDecoder::Flush,
158 callback)); 169 callback));
159 return; 170 return;
160 } 171 }
161 172
162 DCHECK_EQ(MessageLoop::current(), message_loop()); 173 DCHECK_EQ(MessageLoop::current(), message_loop());
163 DCHECK(!flush_callback_.get()); 174 DCHECK(!flush_callback_.get());
164 175
176 state_ = kFlushing;
177
178 FlushBuffers();
179
165 flush_callback_.reset(callback); 180 flush_callback_.reset(callback);
166 181
167 // Everything in the presentation time queue is invalid, clear the queue.
168 while (!pts_heap_.IsEmpty())
169 pts_heap_.Pop();
170
171 decode_engine_->Flush(); 182 decode_engine_->Flush();
172 } 183 }
173 184
174 void FFmpegVideoDecoder::OnFlushComplete() { 185 void FFmpegVideoDecoder::OnFlushComplete() {
175 DCHECK_EQ(MessageLoop::current(), message_loop()); 186 DCHECK_EQ(MessageLoop::current(), message_loop());
176 DCHECK(flush_callback_.get()); 187 DCHECK(flush_callback_.get());
177 188
178 AutoCallbackRunner done_runner(flush_callback_.release()); 189 AutoCallbackRunner done_runner(flush_callback_.release());
190
191 // Everything in the presentation time queue is invalid, clear the queue.
192 while (!pts_heap_.IsEmpty())
193 pts_heap_.Pop();
194
195 // Mark the flush operation as done.
196 state_ = kNormal;
179 } 197 }
180 198
181 void FFmpegVideoDecoder::Seek(base::TimeDelta time, 199 void FFmpegVideoDecoder::Seek(base::TimeDelta time,
182 FilterCallback* callback) { 200 FilterCallback* callback) {
183 if (MessageLoop::current() != message_loop()) { 201 if (MessageLoop::current() != message_loop()) {
184 message_loop()->PostTask(FROM_HERE, 202 message_loop()->PostTask(FROM_HERE,
185 NewRunnableMethod(this, 203 NewRunnableMethod(this,
186 &FFmpegVideoDecoder::Seek, 204 &FFmpegVideoDecoder::Seek,
187 time, 205 time,
188 callback)); 206 callback));
189 return; 207 return;
190 } 208 }
191 209
192 DCHECK_EQ(MessageLoop::current(), message_loop()); 210 DCHECK_EQ(MessageLoop::current(), message_loop());
193 DCHECK(!seek_callback_.get()); 211 DCHECK(!seek_callback_.get());
194 212
195 // TODO(jiesun): when we move to parallel Flush, we should remove this.
196 DCHECK_EQ(0u, pending_reads_) << "Pending reads should have completed";
197 DCHECK_EQ(0u, pending_requests_) << "Pending requests should be empty";
198
199 seek_callback_.reset(callback); 213 seek_callback_.reset(callback);
200 decode_engine_->Seek(); 214 decode_engine_->Seek();
201 } 215 }
202 216
203 void FFmpegVideoDecoder::OnSeekComplete() { 217 void FFmpegVideoDecoder::OnSeekComplete() {
204 DCHECK_EQ(MessageLoop::current(), message_loop()); 218 DCHECK_EQ(MessageLoop::current(), message_loop());
205 DCHECK(seek_callback_.get()); 219 DCHECK(seek_callback_.get());
206 220
207 AutoCallbackRunner done_runner(seek_callback_.release()); 221 AutoCallbackRunner done_runner(seek_callback_.release());
208 state_ = kNormal;
209 } 222 }
210 223
211 void FFmpegVideoDecoder::OnError() { 224 void FFmpegVideoDecoder::OnError() {
212 NOTIMPLEMENTED(); 225 NOTIMPLEMENTED();
213 } 226 }
214 227
215 void FFmpegVideoDecoder::OnFormatChange(VideoStreamInfo stream_info) { 228 void FFmpegVideoDecoder::OnFormatChange(VideoStreamInfo stream_info) {
216 NOTIMPLEMENTED(); 229 NOTIMPLEMENTED();
217 } 230 }
218 231
219 void FFmpegVideoDecoder::OnReadComplete(Buffer* buffer_in) { 232 void FFmpegVideoDecoder::OnReadComplete(Buffer* buffer_in) {
220 scoped_refptr<Buffer> buffer = buffer_in; 233 scoped_refptr<Buffer> buffer = buffer_in;
221 message_loop()->PostTask( 234 message_loop()->PostTask(
222 FROM_HERE, 235 FROM_HERE,
223 NewRunnableMethod(this, 236 NewRunnableMethod(this,
224 &FFmpegVideoDecoder::OnReadCompleteTask, 237 &FFmpegVideoDecoder::OnReadCompleteTask,
225 buffer)); 238 buffer));
226 } 239 }
227 240
228 void FFmpegVideoDecoder::OnReadCompleteTask(scoped_refptr<Buffer> buffer) { 241 void FFmpegVideoDecoder::OnReadCompleteTask(scoped_refptr<Buffer> buffer) {
229 DCHECK_EQ(MessageLoop::current(), message_loop()); 242 DCHECK_EQ(MessageLoop::current(), message_loop());
230 DCHECK_GT(pending_reads_, 0u); 243 DCHECK_NE(state_, kStopped); // because of Flush() before Stop().
231
232 --pending_reads_;
233 244
234 // During decode, because reads are issued asynchronously, it is possible to 245 // During decode, because reads are issued asynchronously, it is possible to
235 // receive multiple end of stream buffers since each read is acked. When the 246 // receive multiple end of stream buffers since each read is acked. When the
236 // first end of stream buffer is read, FFmpeg may still have frames queued 247 // first end of stream buffer is read, FFmpeg may still have frames queued
237 // up in the decoder so we need to go through the decode loop until it stops 248 // up in the decoder so we need to go through the decode loop until it stops
238 // giving sensible data. After that, the decoder should output empty 249 // giving sensible data. After that, the decoder should output empty
239 // frames. There are three states the decoder can be in: 250 // frames. There are three states the decoder can be in:
240 // 251 //
241 // kNormal: This is the starting state. Buffers are decoded. Decode errors 252 // kNormal: This is the starting state. Buffers are decoded. Decode errors
242 // are discarded. 253 // are discarded.
243 // kFlushCodec: There isn't any more input data. Call avcodec_decode_video2 254 // kFlushCodec: There isn't any more input data. Call avcodec_decode_video2
244 // until no more data is returned to flush out remaining 255 // until no more data is returned to flush out remaining
245 // frames. The input buffer is ignored at this point. 256 // frames. The input buffer is ignored at this point.
246 // kDecodeFinished: All calls return empty frames. 257 // kDecodeFinished: All calls return empty frames.
247 // 258 //
248 // These are the possible state transitions. 259 // These are the possible state transitions.
249 // 260 //
250 // kNormal -> kFlushCodec: 261 // kNormal -> kFlushCodec:
251 // When buffer->IsEndOfStream() is first true. 262 // When buffer->IsEndOfStream() is first true.
252 // kNormal -> kDecodeFinished: 263 // kNormal -> kDecodeFinished:
253 // A catastrophic failure occurs, and decoding needs to stop. 264 // A catastrophic failure occurs, and decoding needs to stop.
254 // kFlushCodec -> kDecodeFinished: 265 // kFlushCodec -> kDecodeFinished:
255 // When avcodec_decode_video2() returns 0 data or errors out. 266 // When avcodec_decode_video2() returns 0 data or errors out.
256 // (any state) -> kNormal: 267 // (any state) -> kNormal:
257 // Any time buffer->IsDiscontinuous() is true. 268 // Any time buffer->IsDiscontinuous() is true.
258 //
259 // If the decoding is finished, we just always return empty frames.
260 if (state_ == kDecodeFinished || state_ == kStopped) {
261 DCHECK(buffer->IsEndOfStream());
262
263 --pending_requests_;
264 // Signal VideoRenderer the end of the stream event.
265 scoped_refptr<VideoFrame> video_frame;
266 VideoFrame::CreateEmptyFrame(&video_frame);
267 fill_buffer_done_callback()->Run(video_frame);
268 return;
269 }
270 269
271 // Transition to kFlushCodec on the first end of stream buffer. 270 // Transition to kFlushCodec on the first end of stream buffer.
272 if (state_ == kNormal && buffer->IsEndOfStream()) { 271 if (state_ == kNormal && buffer->IsEndOfStream()) {
273 state_ = kFlushCodec; 272 state_ = kFlushCodec;
274 } 273 }
275 274
276 // Push all incoming timestamps into the priority queue as long as we have 275 // Push all incoming timestamps into the priority queue as long as we have
277 // not yet received an end of stream buffer. It is important that this line 276 // not yet received an end of stream buffer. It is important that this line
278 // stay below the state transition into kFlushCodec done above. 277 // stay below the state transition into kFlushCodec done above.
279 // 278 //
(...skipping 15 matching lines...)
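
The comment block in OnReadCompleteTask() above describes a small decode state machine (kNormal, kFlushCodec, kDecodeFinished) and its transitions. The sketch below restates just those documented transitions as standalone C++ so they can be read in one place; DecodeState, OnBuffer, and OnDecodeOutput are illustrative names, not code from the patch.

// Illustrative restatement of the transitions documented in the comment above.
#include <cassert>

enum class DecodeState { kNormal, kFlushCodec, kDecodeFinished };

// Applied when a demuxed buffer arrives.
DecodeState OnBuffer(DecodeState state, bool end_of_stream, bool discontinuous) {
  if (discontinuous)
    return DecodeState::kNormal;              // (any state) -> kNormal
  if (state == DecodeState::kNormal && end_of_stream)
    return DecodeState::kFlushCodec;          // kNormal -> kFlushCodec
  return state;
}

// Applied when the codec reports a decode result.
DecodeState OnDecodeOutput(DecodeState state, bool got_frame, bool fatal_error) {
  if (fatal_error)
    return DecodeState::kDecodeFinished;      // kNormal -> kDecodeFinished
  if (state == DecodeState::kFlushCodec && !got_frame)
    return DecodeState::kDecodeFinished;      // kFlushCodec -> kDecodeFinished
  return state;
}

int main() {
  DecodeState s = DecodeState::kNormal;
  s = OnBuffer(s, /*end_of_stream=*/true, /*discontinuous=*/false);
  assert(s == DecodeState::kFlushCodec);      // Now draining queued frames.
  s = OnDecodeOutput(s, /*got_frame=*/false, /*fatal_error=*/false);
  assert(s == DecodeState::kDecodeFinished);  // Codec returned no more data.
  return 0;
}
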
295 FROM_HERE, 294 FROM_HERE,
296 NewRunnableMethod(this, 295 NewRunnableMethod(this,
297 &FFmpegVideoDecoder::FillThisBuffer, 296 &FFmpegVideoDecoder::FillThisBuffer,
298 video_frame)); 297 video_frame));
299 return; 298 return;
300 } 299 }
301 300
302 DCHECK_EQ(MessageLoop::current(), message_loop()); 301 DCHECK_EQ(MessageLoop::current(), message_loop());
303 302
304 // Synchronized flushing before stop should prevent this. 303 // Synchronized flushing before stop should prevent this.
305 if (state_ == kStopped) 304 DCHECK_NE(state_, kStopped);
306 return; // Discard the video frame. 305
306 // If the decoding is finished, we just always return empty frames.
307 if (state_ == kDecodeFinished) {
308 // Signal VideoRenderer the end of the stream event.
309 scoped_refptr<VideoFrame> empty_frame;
310 VideoFrame::CreateEmptyFrame(&empty_frame);
311 fill_buffer_done_callback()->Run(empty_frame);
312
313 // Fall through, because we still need to keep a record of this frame.
314 }
307 315
308 // Notify decode engine the available of new frame. 316 // Notify decode engine the available of new frame.
309 ++pending_requests_;
310 decode_engine_->FillThisBuffer(video_frame); 317 decode_engine_->FillThisBuffer(video_frame);
311 } 318 }
312 319
313 void FFmpegVideoDecoder::OnFillBufferCallback( 320 void FFmpegVideoDecoder::OnFillBufferCallback(
314 scoped_refptr<VideoFrame> video_frame) { 321 scoped_refptr<VideoFrame> video_frame) {
315 DCHECK_EQ(MessageLoop::current(), message_loop()); 322 DCHECK_EQ(MessageLoop::current(), message_loop());
316 323 DCHECK_NE(state_, kStopped);
317 // TODO(jiesun): Flush before stop will prevent this from happening.
318 if (state_ == kStopped)
319 return; // Discard the video frame.
320 324
321 if (video_frame.get()) { 325 if (video_frame.get()) {
326 if (kPausing == state_ || kFlushing == state_) {
327 frame_queue_flushed_.push_back(video_frame);
328 if (kFlushing == state_)
329 FlushBuffers();
330 return;
331 }
332
322 // If we actually got data back, enqueue a frame. 333 // If we actually got data back, enqueue a frame.
323 last_pts_ = FindPtsAndDuration(*time_base_, &pts_heap_, last_pts_, 334 last_pts_ = FindPtsAndDuration(*time_base_, &pts_heap_, last_pts_,
324 video_frame.get()); 335 video_frame.get());
325 336
326 video_frame->SetTimestamp(last_pts_.timestamp); 337 video_frame->SetTimestamp(last_pts_.timestamp);
327 video_frame->SetDuration(last_pts_.duration); 338 video_frame->SetDuration(last_pts_.duration);
328 339
329 // Deliver this frame to VideoRenderer.
330 --pending_requests_;
331 fill_buffer_done_callback()->Run(video_frame); 340 fill_buffer_done_callback()->Run(video_frame);
332 } else { 341 } else {
333 // When in kFlushCodec, any errored decode, or a 0-lengthed frame, 342 // When in kFlushCodec, any errored decode, or a 0-lengthed frame,
334 // is taken as a signal to stop decoding. 343 // is taken as a signal to stop decoding.
335 if (state_ == kFlushCodec) { 344 if (state_ == kFlushCodec) {
336 state_ = kDecodeFinished; 345 state_ = kDecodeFinished;
337 346
338 --pending_requests_;
339 // Signal VideoRenderer the end of the stream event. 347 // Signal VideoRenderer the end of the stream event.
340 scoped_refptr<VideoFrame> video_frame; 348 scoped_refptr<VideoFrame> video_frame;
341 VideoFrame::CreateEmptyFrame(&video_frame); 349 VideoFrame::CreateEmptyFrame(&video_frame);
342 fill_buffer_done_callback()->Run(video_frame); 350 fill_buffer_done_callback()->Run(video_frame);
343 } 351 }
344 } 352 }
345 } 353 }
346 354
347 void FFmpegVideoDecoder::OnEmptyBufferCallback( 355 void FFmpegVideoDecoder::OnEmptyBufferCallback(
348 scoped_refptr<Buffer> buffer) { 356 scoped_refptr<Buffer> buffer) {
349 DCHECK_EQ(MessageLoop::current(), message_loop()); 357 DCHECK_EQ(MessageLoop::current(), message_loop());
350 DCHECK_LE(pending_reads_, pending_requests_); 358 DCHECK_NE(state_, kStopped);
351 359
352 demuxer_stream_->Read( 360 demuxer_stream_->Read(
353 NewCallback(this, &FFmpegVideoDecoder::OnReadComplete)); 361 NewCallback(this, &FFmpegVideoDecoder::OnReadComplete));
354 ++pending_reads_;
355 } 362 }
356 363
357 FFmpegVideoDecoder::TimeTuple FFmpegVideoDecoder::FindPtsAndDuration( 364 FFmpegVideoDecoder::TimeTuple FFmpegVideoDecoder::FindPtsAndDuration(
358 const AVRational& time_base, 365 const AVRational& time_base,
359 PtsHeap* pts_heap, 366 PtsHeap* pts_heap,
360 const TimeTuple& last_pts, 367 const TimeTuple& last_pts,
361 const VideoFrame* frame) { 368 const VideoFrame* frame) {
362 TimeTuple pts; 369 TimeTuple pts;
363 370
364 // First search the VideoFrame for the pts. This is the most authoritative. 371 // First search the VideoFrame for the pts. This is the most authoritative.
(...skipping 36 matching lines...)
401 } 408 }
402 409
403 return pts; 410 return pts;
404 } 411 }
405 412
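
Most of FindPtsAndDuration() is elided above; the only visible hint is that the VideoFrame's own pts is treated as the most authoritative source. The sketch below shows one plausible fallback order (frame-provided pts, then the queued demuxer pts from the heap, then extrapolation from the last pts plus its duration). This ordering is an assumption for illustration, not a transcription of the elided body.

// Hedged sketch of a pts/duration selection strategy; the real elided body may differ.
#include <cassert>
#include <cstdint>
#include <functional>
#include <queue>
#include <vector>

struct TimeTuple {
  int64_t timestamp_us;  // presentation timestamp
  int64_t duration_us;   // frame duration
};

// Min-heap of demuxer-provided timestamps, analogous to the pts_heap_ member.
using PtsHeap =
    std::priority_queue<int64_t, std::vector<int64_t>, std::greater<int64_t>>;

TimeTuple FindPtsAndDuration(PtsHeap* pts_heap,
                             const TimeTuple& last_pts,
                             int64_t frame_pts_us,  // -1 if the frame carries none
                             int64_t frame_duration_us) {
  TimeTuple pts{0, frame_duration_us};
  if (frame_pts_us >= 0) {
    pts.timestamp_us = frame_pts_us;     // 1. Frame-provided pts is most authoritative.
  } else if (!pts_heap->empty()) {
    pts.timestamp_us = pts_heap->top();  // 2. Fall back to the queued demuxer pts.
    pts_heap->pop();
  } else {
    // 3. Otherwise extrapolate from the previous frame.
    pts.timestamp_us = last_pts.timestamp_us + last_pts.duration_us;
  }
  return pts;
}

int main() {
  PtsHeap heap;
  heap.push(40000);
  TimeTuple last{0, 33000};
  TimeTuple t = FindPtsAndDuration(&heap, last, /*frame_pts_us=*/-1, 33000);
  assert(t.timestamp_us == 40000);  // Took the queued pts since the frame had none.
  return 0;
}
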
406 bool FFmpegVideoDecoder::ProvidesBuffer() { 413 bool FFmpegVideoDecoder::ProvidesBuffer() {
407 DCHECK(info_.success_); 414 DCHECK(info_.success_);
408 return info_.provides_buffers_; 415 return info_.provides_buffers_;
409 } 416 }
410 417
418 void FFmpegVideoDecoder::FlushBuffers() {
419 while (!frame_queue_flushed_.empty()) {
420 scoped_refptr<VideoFrame> video_frame;
421 video_frame = frame_queue_flushed_.front();
422 frame_queue_flushed_.pop_front();
423
424 // Depending on who owns the buffers, we either return the frame to the
425 // renderer or return it to the decode engine.
426 if (ProvidesBuffer())
427 decode_engine_->FillThisBuffer(video_frame);
428 else
429 fill_buffer_done_callback()->Run(video_frame);
430 }
431 }
432
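
FlushBuffers() above, together with the kPausing/kFlushing branches added to OnFillBufferCallback(), is the heart of the buffer-recycling behaviour this patch introduces: frames decoded while pausing or flushing are parked in frame_queue_flushed_ and later handed back to whichever side owns the underlying buffers. The condensed sketch below models that flow; FrameRecycler and its callbacks are hypothetical stand-ins for the decoder, decode engine, and renderer.

// Illustrative model of the pause/flush frame recycling; names are stand-ins.
#include <deque>
#include <functional>
#include <iostream>
#include <memory>

struct VideoFrame { int id; };
using FrameHandler = std::function<void(std::shared_ptr<VideoFrame>)>;

class FrameRecycler {
 public:
  FrameRecycler(bool engine_provides_buffers,
                FrameHandler return_to_engine,
                FrameHandler deliver_to_renderer)
      : provides_buffer_(engine_provides_buffers),
        return_to_engine_(std::move(return_to_engine)),
        deliver_to_renderer_(std::move(deliver_to_renderer)) {}

  // While pausing/flushing, decoded frames are parked here instead of being
  // delivered downstream (as in OnFillBufferCallback()).
  void QueueFrame(std::shared_ptr<VideoFrame> frame) {
    frame_queue_flushed_.push_back(std::move(frame));
  }

  // Mirrors FlushBuffers(): drain the parked frames, routing each one according
  // to buffer ownership (ProvidesBuffer() in the patch).
  void FlushBuffers() {
    while (!frame_queue_flushed_.empty()) {
      std::shared_ptr<VideoFrame> frame = frame_queue_flushed_.front();
      frame_queue_flushed_.pop_front();
      if (provides_buffer_)
        return_to_engine_(frame);     // Engine owns the buffer: recycle it.
      else
        deliver_to_renderer_(frame);  // Renderer owns it: deliver as usual.
    }
  }

 private:
  bool provides_buffer_;
  FrameHandler return_to_engine_;
  FrameHandler deliver_to_renderer_;
  std::deque<std::shared_ptr<VideoFrame>> frame_queue_flushed_;
};

int main() {
  FrameRecycler recycler(
      /*engine_provides_buffers=*/true,
      [](std::shared_ptr<VideoFrame> f) { std::cout << "recycled " << f->id << "\n"; },
      [](std::shared_ptr<VideoFrame> f) { std::cout << "delivered " << f->id << "\n"; });
  recycler.QueueFrame(std::make_shared<VideoFrame>(VideoFrame{1}));
  recycler.QueueFrame(std::make_shared<VideoFrame>(VideoFrame{2}));
  recycler.FlushBuffers();  // Both frames go back to the decode engine.
  return 0;
}
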
411 void FFmpegVideoDecoder::SetVideoDecodeEngineForTest( 433 void FFmpegVideoDecoder::SetVideoDecodeEngineForTest(
412 VideoDecodeEngine* engine) { 434 VideoDecodeEngine* engine) {
413 decode_engine_ = engine; 435 decode_engine_ = engine;
414 } 436 }
415 437
416 // static 438 // static
417 FilterFactory* FFmpegVideoDecoder::CreateFactory() { 439 FilterFactory* FFmpegVideoDecoder::CreateFactory() {
418 return new FilterFactoryImpl1<FFmpegVideoDecoder, FFmpegVideoDecodeEngine*>( 440 return new FilterFactoryImpl1<FFmpegVideoDecoder, FFmpegVideoDecodeEngine*>(
419 new FFmpegVideoDecodeEngine()); 441 new FFmpegVideoDecodeEngine());
420 } 442 }
421 443
422 // static 444 // static
423 bool FFmpegVideoDecoder::IsMediaFormatSupported(const MediaFormat& format) { 445 bool FFmpegVideoDecoder::IsMediaFormatSupported(const MediaFormat& format) {
424 std::string mime_type; 446 std::string mime_type;
425 return format.GetAsString(MediaFormat::kMimeType, &mime_type) && 447 return format.GetAsString(MediaFormat::kMimeType, &mime_type) &&
426 mime_type::kFFmpegVideo == mime_type; 448 mime_type::kFFmpegVideo == mime_type;
427 } 449 }
428 450
429 } // namespace media 451 } // namespace media