Chromium Code Reviews

Unified Diff: media/filters/gpu_video_decoder.cc

Issue 10832087: Remove VideoDecoderConfig.frame_rate_xxx() & VideoFrame::Get/SetDuration() (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Created 8 years, 5 months ago
Index: media/filters/gpu_video_decoder.cc
diff --git a/media/filters/gpu_video_decoder.cc b/media/filters/gpu_video_decoder.cc
index 003f47c0212156fd7c06eeded1dc4f381fa3a23c..b97f668ca29a51855ee5fb70ee47c6b0f9193068 100644
--- a/media/filters/gpu_video_decoder.cc
+++ b/media/filters/gpu_video_decoder.cc
@@ -37,8 +37,8 @@ GpuVideoDecoder::BufferPair::BufferPair(
GpuVideoDecoder::BufferPair::~BufferPair() {}
GpuVideoDecoder::BufferTimeData::BufferTimeData(
- int32 bbid, base::TimeDelta ts, base::TimeDelta dur)
- : bitstream_buffer_id(bbid), timestamp(ts), duration(dur) {
+ int32 bbid, base::TimeDelta ts)
+ : bitstream_buffer_id(bbid), timestamp(ts) {
}
GpuVideoDecoder::BufferTimeData::~BufferTimeData() {}
@@ -163,7 +163,6 @@ void GpuVideoDecoder::Initialize(const scoped_refptr<DemuxerStream>& stream,
demuxer_stream_->EnableBitstreamConverter();
natural_size_ = config.natural_size();
- config_frame_duration_ = GetFrameDuration(config);
DVLOG(1) << "GpuVideoDecoder::Initialize() succeeded.";
vda_loop_proxy_->PostTaskAndReply(
@@ -274,11 +273,8 @@ void GpuVideoDecoder::RequestBufferDecode(
void GpuVideoDecoder::RecordBufferTimeData(
const BitstreamBuffer& bitstream_buffer, const Buffer& buffer) {
- base::TimeDelta duration = buffer.GetDuration();
- if (duration == base::TimeDelta())
- duration = config_frame_duration_;
input_buffer_time_data_.push_front(BufferTimeData(
- bitstream_buffer.id(), buffer.GetTimestamp(), duration));
+ bitstream_buffer.id(), buffer.GetTimestamp()));
// Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but
// that's too small for some pathological B-frame test videos. The cost of
// using too-high a value is low (192 bits per extra slot).
@@ -290,17 +286,13 @@ void GpuVideoDecoder::RecordBufferTimeData(
}
void GpuVideoDecoder::GetBufferTimeData(
Ami GONE FROM CHROMIUM 2012/07/31 18:13:52 this could just return the timedelta now
acolwell GONE FROM CHROMIUM 2012/07/31 18:41:24 Done.
- int32 id, base::TimeDelta* timestamp, base::TimeDelta* duration) {
- // If all else fails later, at least we can set a default duration if there
- // was one in the config.
- *duration = config_frame_duration_;
+ int32 id, base::TimeDelta* timestamp) {
for (std::list<BufferTimeData>::const_iterator it =
input_buffer_time_data_.begin(); it != input_buffer_time_data_.end();
++it) {
if (it->bitstream_buffer_id != id)
continue;
*timestamp = it->timestamp;
- *duration = it->duration;
return;
}
NOTREACHED() << "Missing bitstreambuffer id: " << id;
@@ -394,13 +386,12 @@ void GpuVideoDecoder::PictureReady(const media::Picture& picture) {
// Update frame's timestamp.
base::TimeDelta timestamp;
- base::TimeDelta duration;
- GetBufferTimeData(picture.bitstream_buffer_id(), &timestamp, &duration);
+ GetBufferTimeData(picture.bitstream_buffer_id(), &timestamp);
DCHECK(decoder_texture_target_);
scoped_refptr<VideoFrame> frame(VideoFrame::WrapNativeTexture(
pb.texture_id(), decoder_texture_target_, pb.size().width(),
- pb.size().height(), timestamp, duration,
+ pb.size().height(), timestamp,
base::Bind(&GpuVideoDecoder::ReusePictureBuffer, this,
picture.picture_buffer_id())));
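
The reviewer's note on GetBufferTimeData() ("this could just return the timedelta now", marked Done in a later patch set) observes that once the duration out-parameter is gone, the single remaining timestamp out-parameter can simply become the return value. A minimal sketch of that follow-up, assuming the method keeps its name, stays a member of GpuVideoDecoder, and falls back to a zero TimeDelta after the NOTREACHED(); the exact shape landed in the next patch set may differ:

// Hedged sketch of the reviewer's suggestion, not the code as landed.
// Returns the timestamp recorded for the given bitstream buffer id, or a
// zero TimeDelta if the id is unknown (which NOTREACHED() flags as a bug).
base::TimeDelta GpuVideoDecoder::GetBufferTimeData(int32 id) {
  for (std::list<BufferTimeData>::const_iterator it =
           input_buffer_time_data_.begin();
       it != input_buffer_time_data_.end(); ++it) {
    if (it->bitstream_buffer_id == id)
      return it->timestamp;
  }
  NOTREACHED() << "Missing bitstreambuffer id: " << id;
  return base::TimeDelta();  // Defensive zero fallback.
}

The call site in PictureReady() would then collapse to a single assignment, e.g. base::TimeDelta timestamp = GetBufferTimeData(picture.bitstream_buffer_id());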
