| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/filters/ffmpeg_demuxer.h" | 5 #include "media/filters/ffmpeg_demuxer.h" |
| 6 | 6 |
| 7 #include <algorithm> | 7 #include <algorithm> |
| 8 #include <string> | 8 #include <string> |
| 9 | 9 |
| 10 #include "base/bind.h" | 10 #include "base/bind.h" |
| (...skipping 244 matching lines...) |
| 255 } | 255 } |
| 256 | 256 |
| 257 // | 257 // |
| 258 // FFmpegDemuxer | 258 // FFmpegDemuxer |
| 259 // | 259 // |
| 260 FFmpegDemuxer::FFmpegDemuxer( | 260 FFmpegDemuxer::FFmpegDemuxer( |
| 261 const scoped_refptr<base::MessageLoopProxy>& message_loop, | 261 const scoped_refptr<base::MessageLoopProxy>& message_loop, |
| 262 const scoped_refptr<DataSource>& data_source) | 262 const scoped_refptr<DataSource>& data_source) |
| 263 : host_(NULL), | 263 : host_(NULL), |
| 264 message_loop_(message_loop), | 264 message_loop_(message_loop), |
| 265 format_context_(NULL), | |
| 266 data_source_(data_source), | 265 data_source_(data_source), |
| 267 read_event_(false, false), | 266 read_event_(false, false), |
| 268 read_has_failed_(false), | 267 read_has_failed_(false), |
| 269 last_read_bytes_(0), | 268 last_read_bytes_(0), |
| 270 read_position_(0), | 269 read_position_(0), |
| 271 bitrate_(0), | 270 bitrate_(0), |
| 272 start_time_(kNoTimestamp()), | 271 start_time_(kNoTimestamp()), |
| 273 audio_disabled_(false), | 272 audio_disabled_(false), |
| 274 duration_known_(false) { | 273 duration_known_(false) { |
| 275 DCHECK(message_loop_); | 274 DCHECK(message_loop_); |
| 276 DCHECK(data_source_); | 275 DCHECK(data_source_); |
| 277 } | 276 } |
| 278 | 277 |
| 279 FFmpegDemuxer::~FFmpegDemuxer() { | 278 FFmpegDemuxer::~FFmpegDemuxer() {} |
| 280 // In this destructor, we clean up resources held by FFmpeg. It is ugly to | |
| 281 // close the codec contexts here because the corresponding codecs are opened | |
| 282 // in the decoder filters. By reaching this point, all filters should have | |
| 283 // stopped, so this is the only safe place to do the global clean up. | |
| 284 // TODO(hclam): close the codecs in the corresponding decoders. | |
| 285 if (!format_context_) | |
| 286 return; | |
| 287 | |
| 288 DestroyAVFormatContext(format_context_); | |
| 289 format_context_ = NULL; | |
| 290 } | |
| 291 | 279 |
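
The destructor is now empty because the demuxer no longer owns the AVFormatContext; that responsibility moves to the FFmpegGlue object created in InitializeTask() below, which presumably tears the context down when it goes away. As a rough illustration of that ownership model (a hypothetical wrapper, not the actual FFmpegGlue code), an RAII holder for the context might look like this:

```cpp
// Hypothetical RAII wrapper, for illustration only: the real FFmpegGlue
// owns the AVFormatContext, so FFmpegDemuxer's destructor no longer has to
// tear it down by hand.
extern "C" {
#include <libavformat/avformat.h>
}

class ScopedFormatContext {
 public:
  ScopedFormatContext() : context_(avformat_alloc_context()) {}
  ~ScopedFormatContext() {
    // avformat_close_input() closes the input (if it was opened) and frees
    // the context.
    if (context_)
      avformat_close_input(&context_);
  }
  AVFormatContext* get() const { return context_; }

 private:
  AVFormatContext* context_;

  // Noncopyable, in the pre-C++11 style of the surrounding code.
  ScopedFormatContext(const ScopedFormatContext&);
  void operator=(const ScopedFormatContext&);
};
```
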
| 292 void FFmpegDemuxer::PostDemuxTask() { | 280 void FFmpegDemuxer::PostDemuxTask() { |
| 293 message_loop_->PostTask(FROM_HERE, | 281 message_loop_->PostTask(FROM_HERE, |
| 294 base::Bind(&FFmpegDemuxer::DemuxTask, this)); | 282 base::Bind(&FFmpegDemuxer::DemuxTask, this)); |
| 295 } | 283 } |
| 296 | 284 |
| 297 void FFmpegDemuxer::Stop(const base::Closure& callback) { | 285 void FFmpegDemuxer::Stop(const base::Closure& callback) { |
| 298 // Post a task to notify the streams to stop as well. | 286 // Post a task to notify the streams to stop as well. |
| 299 message_loop_->PostTask(FROM_HERE, | 287 message_loop_->PostTask(FROM_HERE, |
| 300 base::Bind(&FFmpegDemuxer::StopTask, this, callback)); | 288 base::Bind(&FFmpegDemuxer::StopTask, this, callback)); |
| (...skipping 36 matching lines...) |
| 337 return *iter; | 325 return *iter; |
| 338 } | 326 } |
| 339 } | 327 } |
| 340 return NULL; | 328 return NULL; |
| 341 } | 329 } |
| 342 | 330 |
| 343 base::TimeDelta FFmpegDemuxer::GetStartTime() const { | 331 base::TimeDelta FFmpegDemuxer::GetStartTime() const { |
| 344 return start_time_; | 332 return start_time_; |
| 345 } | 333 } |
| 346 | 334 |
| 347 size_t FFmpegDemuxer::Read(size_t size, uint8* data) { | 335 int FFmpegDemuxer::Read(int size, uint8* data) { |
| 348 DCHECK(host_); | 336 DCHECK(host_); |
| 349 DCHECK(data_source_); | 337 DCHECK(data_source_); |
| 350 | 338 |
| 351 // If read has ever failed, return with an error. | 339 // If read has ever failed, return with an error. |
| 352 // TODO(hclam): use a more meaningful constant as error. | 340 // TODO(hclam): use a more meaningful constant as error. |
| 353 if (read_has_failed_) | 341 if (read_has_failed_) |
| 354 return AVERROR(EIO); | 342 return AVERROR(EIO); |
| 355 | 343 |
| 356 // Even though FFmpeg defines AVERROR_EOF, it's not to be used with I/O | 344 // Even though FFmpeg defines AVERROR_EOF, it's not to be used with I/O |
| 357 // routines. Instead return 0 for any read at or past EOF. | 345 // routines. Instead return 0 for any read at or past EOF. |
| (...skipping 95 matching lines...) |
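
For context on the contract Read() follows above: when FFmpeg pulls data through custom I/O it expects the number of bytes read, 0 at or past end of stream, or a negative AVERROR code on failure. A minimal standalone read callback honoring the same contract (the name ReadPacket and the plain FILE* backing are illustrative assumptions, not the DataSource used here) would be:

```cpp
// Minimal sketch of an avio_alloc_context()-style read callback following
// the same rules as FFmpegDemuxer::Read(): bytes read on success, 0 at or
// past EOF, AVERROR(EIO) on failure.
extern "C" {
#include <libavformat/avio.h>
#include <libavutil/error.h>
}
#include <cstdio>
#include <stdint.h>

static int ReadPacket(void* opaque, uint8_t* buf, int buf_size) {
  FILE* file = static_cast<FILE*>(opaque);
  size_t bytes_read = fread(buf, 1, buf_size, file);
  if (bytes_read == 0)
    return ferror(file) ? AVERROR(EIO) : 0;  // 0 signals EOF to FFmpeg.
  return static_cast<int>(bytes_read);
}
```
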
| 453 | 441 |
| 454 void FFmpegDemuxer::InitializeTask(DemuxerHost* host, | 442 void FFmpegDemuxer::InitializeTask(DemuxerHost* host, |
| 455 const PipelineStatusCB& status_cb) { | 443 const PipelineStatusCB& status_cb) { |
| 456 DCHECK(message_loop_->BelongsToCurrentThread()); | 444 DCHECK(message_loop_->BelongsToCurrentThread()); |
| 457 host_ = host; | 445 host_ = host; |
| 458 | 446 |
| 459 // TODO(scherkus): DataSource should have a host by this point, | 447 // TODO(scherkus): DataSource should have a host by this point, |
| 460 // see http://crbug.com/122071 | 448 // see http://crbug.com/122071 |
| 461 data_source_->set_host(host); | 449 data_source_->set_host(host); |
| 462 | 450 |
| 463 // Add ourself to Protocol list and get our unique key. | 451 glue_.reset(new FFmpegGlue(this)); |
| 464 std::string key = FFmpegGlue::GetInstance()->AddProtocol(this); | 452 AVFormatContext* format_context = glue_->format_context(); |
| 465 | |
| 466 // Open FFmpeg AVFormatContext. | |
| 467 DCHECK(!format_context_); | |
| 468 AVFormatContext* context = avformat_alloc_context(); | |
| 469 | 453 |
| 470 // Disable ID3v1 tag reading to avoid costly seeks to end of file for data we | 454 // Disable ID3v1 tag reading to avoid costly seeks to end of file for data we |
| 471 // don't use. FFmpeg will only read ID3v1 tags if no other metadata is | 455 // don't use. FFmpeg will only read ID3v1 tags if no other metadata is |
| 472 // available, so add a metadata entry to ensure some is always present. | 456 // available, so add a metadata entry to ensure some is always present. |
| 473 av_dict_set(&context->metadata, "skip_id3v1_tags", "", 0); | 457 av_dict_set(&format_context->metadata, "skip_id3v1_tags", "", 0); |
| 474 | 458 |
| 475 int result = avformat_open_input(&context, key.c_str(), NULL, NULL); | 459 if (!glue_->OpenContext()) { |
| 476 | |
| 477 // Remove ourself from protocol list. | |
| 478 FFmpegGlue::GetInstance()->RemoveProtocol(this); | |
| 479 | |
| 480 if (result < 0) { | |
| 481 status_cb.Run(DEMUXER_ERROR_COULD_NOT_OPEN); | 460 status_cb.Run(DEMUXER_ERROR_COULD_NOT_OPEN); |
| 482 return; | 461 return; |
| 483 } | 462 } |
| 484 | 463 |
| 485 DCHECK(context); | |
| 486 format_context_ = context; | |
| 487 | |
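
The AddProtocol()/RemoveProtocol() sequence in the left column is replaced by an FFmpegGlue instance that plugs this demuxer in as a custom I/O source before opening the context. The sketch below shows the general avio_alloc_context() pattern such a helper typically uses; the function name, 32 KB buffer size, and simplified error handling are assumptions for illustration, not FFmpegGlue's actual implementation:

```cpp
// Illustrative sketch of opening an AVFormatContext over custom I/O,
// roughly what a glue_->OpenContext()-style helper does under the hood.
// Error-path cleanup of |avio| and |buffer| is omitted for brevity.
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/mem.h>
}
#include <stdint.h>

AVFormatContext* OpenWithCustomIO(void* opaque,
                                  int (*read_cb)(void*, uint8_t*, int),
                                  int64_t (*seek_cb)(void*, int64_t, int)) {
  const int kBufferSize = 32 * 1024;  // Assumed buffer size.
  unsigned char* buffer =
      static_cast<unsigned char*>(av_malloc(kBufferSize));
  AVIOContext* avio = avio_alloc_context(buffer, kBufferSize,
                                         0 /* write_flag */, opaque,
                                         read_cb, NULL /* write_cb */,
                                         seek_cb);

  AVFormatContext* context = avformat_alloc_context();
  context->pb = avio;  // Route all reads and seeks through the callbacks.

  // With custom I/O there is no real URL, so an empty name is passed.
  if (avformat_open_input(&context, "", NULL, NULL) < 0)
    return NULL;  // FFmpeg frees |context| and nulls it out on failure.
  return context;
}
```
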
| 488 // Fully initialize AVFormatContext by parsing the stream a little. | 464 // Fully initialize AVFormatContext by parsing the stream a little. |
| 489 result = avformat_find_stream_info(format_context_, NULL); | 465 int result = avformat_find_stream_info(format_context, NULL); |
| 490 if (result < 0) { | 466 if (result < 0) { |
| 491 status_cb.Run(DEMUXER_ERROR_COULD_NOT_PARSE); | 467 status_cb.Run(DEMUXER_ERROR_COULD_NOT_PARSE); |
| 492 return; | 468 return; |
| 493 } | 469 } |
| 494 | 470 |
| 495 // Create demuxer stream entries for each possible AVStream. | 471 // Create demuxer stream entries for each possible AVStream. |
| 496 streams_.resize(format_context_->nb_streams); | 472 streams_.resize(format_context->nb_streams); |
| 497 bool found_audio_stream = false; | 473 bool found_audio_stream = false; |
| 498 bool found_video_stream = false; | 474 bool found_video_stream = false; |
| 499 | 475 |
| 500 base::TimeDelta max_duration; | 476 base::TimeDelta max_duration; |
| 501 for (size_t i = 0; i < format_context_->nb_streams; ++i) { | 477 for (size_t i = 0; i < format_context->nb_streams; ++i) { |
| 502 AVCodecContext* codec_context = format_context_->streams[i]->codec; | 478 AVCodecContext* codec_context = format_context->streams[i]->codec; |
| 503 AVMediaType codec_type = codec_context->codec_type; | 479 AVMediaType codec_type = codec_context->codec_type; |
| 504 | 480 |
| 505 if (codec_type == AVMEDIA_TYPE_AUDIO) { | 481 if (codec_type == AVMEDIA_TYPE_AUDIO) { |
| 506 if (found_audio_stream) | 482 if (found_audio_stream) |
| 507 continue; | 483 continue; |
| 508 // Ensure the codec is supported. | 484 // Ensure the codec is supported. |
| 509 if (CodecIDToAudioCodec(codec_context->codec_id) == kUnknownAudioCodec) | 485 if (CodecIDToAudioCodec(codec_context->codec_id) == kUnknownAudioCodec) |
| 510 continue; | 486 continue; |
| 511 found_audio_stream = true; | 487 found_audio_stream = true; |
| 512 } else if (codec_type == AVMEDIA_TYPE_VIDEO) { | 488 } else if (codec_type == AVMEDIA_TYPE_VIDEO) { |
| 513 if (found_video_stream) | 489 if (found_video_stream) |
| 514 continue; | 490 continue; |
| 515 // Ensure the codec is supported. | 491 // Ensure the codec is supported. |
| 516 if (CodecIDToVideoCodec(codec_context->codec_id) == kUnknownVideoCodec) | 492 if (CodecIDToVideoCodec(codec_context->codec_id) == kUnknownVideoCodec) |
| 517 continue; | 493 continue; |
| 518 found_video_stream = true; | 494 found_video_stream = true; |
| 519 } else { | 495 } else { |
| 520 continue; | 496 continue; |
| 521 } | 497 } |
| 522 | 498 |
| 523 AVStream* stream = format_context_->streams[i]; | 499 AVStream* stream = format_context->streams[i]; |
| 524 scoped_refptr<FFmpegDemuxerStream> demuxer_stream( | 500 scoped_refptr<FFmpegDemuxerStream> demuxer_stream( |
| 525 new FFmpegDemuxerStream(this, stream)); | 501 new FFmpegDemuxerStream(this, stream)); |
| 526 | 502 |
| 527 streams_[i] = demuxer_stream; | 503 streams_[i] = demuxer_stream; |
| 528 max_duration = std::max(max_duration, demuxer_stream->duration()); | 504 max_duration = std::max(max_duration, demuxer_stream->duration()); |
| 529 | 505 |
| 530 if (stream->first_dts != static_cast<int64_t>(AV_NOPTS_VALUE)) { | 506 if (stream->first_dts != static_cast<int64_t>(AV_NOPTS_VALUE)) { |
| 531 const base::TimeDelta first_dts = ConvertFromTimeBase( | 507 const base::TimeDelta first_dts = ConvertFromTimeBase( |
| 532 stream->time_base, stream->first_dts); | 508 stream->time_base, stream->first_dts); |
| 533 if (start_time_ == kNoTimestamp() || first_dts < start_time_) | 509 if (start_time_ == kNoTimestamp() || first_dts < start_time_) |
| 534 start_time_ = first_dts; | 510 start_time_ = first_dts; |
| 535 } | 511 } |
| 536 } | 512 } |
| 537 | 513 |
| 538 if (!found_audio_stream && !found_video_stream) { | 514 if (!found_audio_stream && !found_video_stream) { |
| 539 status_cb.Run(DEMUXER_ERROR_NO_SUPPORTED_STREAMS); | 515 status_cb.Run(DEMUXER_ERROR_NO_SUPPORTED_STREAMS); |
| 540 return; | 516 return; |
| 541 } | 517 } |
| 542 | 518 |
| 543 if (format_context_->duration != static_cast<int64_t>(AV_NOPTS_VALUE)) { | 519 if (format_context->duration != static_cast<int64_t>(AV_NOPTS_VALUE)) { |
| 544 // If there is a duration value in the container, use that to find the | 520 // If there is a duration value in the container, use that to find the |
| 545 // maximum between it and the duration from A/V streams. | 521 // maximum between it and the duration from A/V streams. |
| 546 const AVRational av_time_base = {1, AV_TIME_BASE}; | 522 const AVRational av_time_base = {1, AV_TIME_BASE}; |
| 547 max_duration = | 523 max_duration = |
| 548 std::max(max_duration, | 524 std::max(max_duration, |
| 549 ConvertFromTimeBase(av_time_base, format_context_->duration)); | 525 ConvertFromTimeBase(av_time_base, format_context->duration)); |
| 550 } else { | 526 } else { |
| 551 // The duration is unknown, in which case this is likely a live stream. | 527 // The duration is unknown, in which case this is likely a live stream. |
| 552 max_duration = kInfiniteDuration(); | 528 max_duration = kInfiniteDuration(); |
| 553 } | 529 } |
| 554 | 530 |
| 555 // Some demuxers, like WAV, do not put timestamps on their frames. We | 531 // Some demuxers, like WAV, do not put timestamps on their frames. We |
| 556 // assume the start time is 0. | 532 // assume the start time is 0. |
| 557 if (start_time_ == kNoTimestamp()) | 533 if (start_time_ == kNoTimestamp()) |
| 558 start_time_ = base::TimeDelta(); | 534 start_time_ = base::TimeDelta(); |
| 559 | 535 |
| 560 // Good to go: set the duration and bitrate and notify we're done | 536 // Good to go: set the duration and bitrate and notify we're done |
| 561 // initializing. | 537 // initializing. |
| 562 host_->SetDuration(max_duration); | 538 host_->SetDuration(max_duration); |
| 563 duration_known_ = (max_duration != kInfiniteDuration()); | 539 duration_known_ = (max_duration != kInfiniteDuration()); |
| 564 | 540 |
| 565 int64 filesize_in_bytes = 0; | 541 int64 filesize_in_bytes = 0; |
| 566 GetSize(&filesize_in_bytes); | 542 GetSize(&filesize_in_bytes); |
| 567 bitrate_ = CalculateBitrate(format_context_, max_duration, filesize_in_bytes); | 543 bitrate_ = CalculateBitrate(format_context, max_duration, filesize_in_bytes); |
| 568 if (bitrate_ > 0) | 544 if (bitrate_ > 0) |
| 569 data_source_->SetBitrate(bitrate_); | 545 data_source_->SetBitrate(bitrate_); |
| 570 | 546 |
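
CalculateBitrate() itself is defined in a skipped portion of this file; when nothing in the container reports a bitrate, the usual fallback is to derive one from the file size and the duration just computed. The sketch below shows only that fallback arithmetic, not the exact helper:

```cpp
// Illustrative fallback arithmetic for a container-level bitrate estimate:
// total bits divided by duration in seconds. Not the actual
// CalculateBitrate() helper, just the idea behind it.
#include <stdint.h>

int EstimateBitrate(int64_t filesize_in_bytes, double duration_in_seconds) {
  if (filesize_in_bytes <= 0 || duration_in_seconds <= 0.0)
    return 0;  // Bitrate unknown.
  return static_cast<int>(filesize_in_bytes * 8 / duration_in_seconds);
}
```
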
| 571 status_cb.Run(PIPELINE_OK); | 547 status_cb.Run(PIPELINE_OK); |
| 572 } | 548 } |
| 573 | 549 |
| 574 void FFmpegDemuxer::SeekTask(base::TimeDelta time, const PipelineStatusCB& cb) { | 550 void FFmpegDemuxer::SeekTask(base::TimeDelta time, const PipelineStatusCB& cb) { |
| 575 DCHECK(message_loop_->BelongsToCurrentThread()); | 551 DCHECK(message_loop_->BelongsToCurrentThread()); |
| 576 | 552 |
| 577 // Tell streams to flush buffers due to seeking. | 553 // Tell streams to flush buffers due to seeking. |
| 578 StreamVector::iterator iter; | 554 StreamVector::iterator iter; |
| 579 for (iter = streams_.begin(); iter != streams_.end(); ++iter) { | 555 for (iter = streams_.begin(); iter != streams_.end(); ++iter) { |
| 580 if (*iter) | 556 if (*iter) |
| 581 (*iter)->FlushBuffers(); | 557 (*iter)->FlushBuffers(); |
| 582 } | 558 } |
| 583 | 559 |
| 584 // Always seek to a timestamp less than or equal to the desired timestamp. | 560 // Always seek to a timestamp less than or equal to the desired timestamp. |
| 585 int flags = AVSEEK_FLAG_BACKWARD; | 561 int flags = AVSEEK_FLAG_BACKWARD; |
| 586 | 562 |
| 587 // Passing -1 as our stream index lets FFmpeg pick a default stream. FFmpeg | 563 // Passing -1 as our stream index lets FFmpeg pick a default stream. FFmpeg |
| 588 // will attempt to use the lowest-index video stream, if present, followed by | 564 // will attempt to use the lowest-index video stream, if present, followed by |
| 589 // the lowest-index audio stream. | 565 // the lowest-index audio stream. |
| 590 if (av_seek_frame(format_context_, -1, time.InMicroseconds(), flags) < 0) { | 566 if (av_seek_frame(glue_->format_context(), -1, time.InMicroseconds(), |
| 567 flags) < 0) { |
| 591 // Use VLOG(1) instead of NOTIMPLEMENTED() to prevent the message being | 568 // Use VLOG(1) instead of NOTIMPLEMENTED() to prevent the message being |
| 592 // captured from stdout and contaminating test output. | 569 // captured from stdout and contaminating test output. |
| 593 // TODO(scherkus): Implement this properly and signal error (BUG=23447). | 570 // TODO(scherkus): Implement this properly and signal error (BUG=23447). |
| 594 VLOG(1) << "Not implemented"; | 571 VLOG(1) << "Not implemented"; |
| 595 } | 572 } |
| 596 | 573 |
| 597 // Notify we're finished seeking. | 574 // Notify we're finished seeking. |
| 598 cb.Run(PIPELINE_OK); | 575 cb.Run(PIPELINE_OK); |
| 599 } | 576 } |
| 600 | 577 |
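
One detail worth noting about the av_seek_frame() call above: with a stream index of -1, FFmpeg interprets the timestamp in AV_TIME_BASE units, i.e. microseconds, which is why time.InMicroseconds() can be passed without per-stream rescaling. Reduced to a standalone sketch (hypothetical wrapper name):

```cpp
// The seek above in isolation. AVSEEK_FLAG_BACKWARD asks for the nearest
// seek point at or before the target so decoding can resume cleanly.
extern "C" {
#include <libavformat/avformat.h>
}
#include <stdint.h>

bool SeekBackwardTo(AVFormatContext* context, int64_t time_in_microseconds) {
  return av_seek_frame(context, -1 /* default stream */,
                       time_in_microseconds, AVSEEK_FLAG_BACKWARD) >= 0;
}
```
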
| 601 void FFmpegDemuxer::DemuxTask() { | 578 void FFmpegDemuxer::DemuxTask() { |
| 602 DCHECK(message_loop_->BelongsToCurrentThread()); | 579 DCHECK(message_loop_->BelongsToCurrentThread()); |
| 603 | 580 |
| 604 // Make sure we have work to do before demuxing. | 581 // Make sure we have work to do before demuxing. |
| 605 if (!StreamsHavePendingReads()) { | 582 if (!StreamsHavePendingReads()) { |
| 606 return; | 583 return; |
| 607 } | 584 } |
| 608 | 585 |
| 609 // Allocate and read an AVPacket from the media. | 586 // Allocate and read an AVPacket from the media. |
| 610 scoped_ptr_malloc<AVPacket, ScopedPtrAVFreePacket> packet(new AVPacket()); | 587 scoped_ptr_malloc<AVPacket, ScopedPtrAVFreePacket> packet(new AVPacket()); |
| 611 int result = av_read_frame(format_context_, packet.get()); | 588 int result = av_read_frame(glue_->format_context(), packet.get()); |
| 612 if (result < 0) { | 589 if (result < 0) { |
| 613 // Update the duration based on the audio stream if | 590 // Update the duration based on the audio stream if |
| 614 // it was previously unknown http://crbug.com/86830 | 591 // it was previously unknown http://crbug.com/86830 |
| 615 if (!duration_known_) { | 592 if (!duration_known_) { |
| 616 // Search streams for AUDIO one. | 593 // Search streams for AUDIO one. |
| 617 for (StreamVector::iterator iter = streams_.begin(); | 594 for (StreamVector::iterator iter = streams_.begin(); |
| 618 iter != streams_.end(); | 595 iter != streams_.end(); |
| 619 ++iter) { | 596 ++iter) { |
| 620 if (*iter && (*iter)->type() == DemuxerStream::AUDIO) { | 597 if (*iter && (*iter)->type() == DemuxerStream::AUDIO) { |
| 621 base::TimeDelta duration = (*iter)->GetElapsedTime(); | 598 base::TimeDelta duration = (*iter)->GetElapsedTime(); |
| (...skipping 107 matching lines...) |
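
DemuxTask() pulls one packet per invocation with av_read_frame() and treats any negative result as end of stream, using that point to patch up a previously unknown duration. For reference, the same read written as a synchronous loop (hypothetical function, stream routing elided):

```cpp
// Reference-only sketch of a synchronous av_read_frame() loop; the demuxer
// above does the same work one packet at a time from DemuxTask().
extern "C" {
#include <libavformat/avformat.h>
}

void DrainPackets(AVFormatContext* context) {
  AVPacket packet;
  while (av_read_frame(context, &packet) >= 0) {
    // ... route |packet| to the stream matching packet.stream_index ...
    av_free_packet(&packet);  // 2012-era API; newer FFmpeg uses av_packet_unref().
  }
  // A negative return is treated as end of stream (or a read error).
}
```
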
| 729 } else if (audio) { | 706 } else if (audio) { |
| 730 buffered = audio->GetBufferedRanges(); | 707 buffered = audio->GetBufferedRanges(); |
| 731 } else if (video) { | 708 } else if (video) { |
| 732 buffered = video->GetBufferedRanges(); | 709 buffered = video->GetBufferedRanges(); |
| 733 } | 710 } |
| 734 for (size_t i = 0; i < buffered.size(); ++i) | 711 for (size_t i = 0; i < buffered.size(); ++i) |
| 735 host_->AddBufferedTimeRange(buffered.start(i), buffered.end(i)); | 712 host_->AddBufferedTimeRange(buffered.start(i), buffered.end(i)); |
| 736 } | 713 } |
| 737 | 714 |
| 738 } // namespace media | 715 } // namespace media |