Index: media/filters/gpu_video_decoder.cc
diff --git a/media/filters/gpu_video_decoder.cc b/media/filters/gpu_video_decoder.cc
index 003f47c0212156fd7c06eeded1dc4f381fa3a23c..e5f620f10caff5eaf1009ace3eb4254ed7085601 100644
--- a/media/filters/gpu_video_decoder.cc
+++ b/media/filters/gpu_video_decoder.cc
@@ -36,13 +36,6 @@ GpuVideoDecoder::BufferPair::BufferPair(
 
 GpuVideoDecoder::BufferPair::~BufferPair() {}
 
-GpuVideoDecoder::BufferTimeData::BufferTimeData(
-    int32 bbid, base::TimeDelta ts, base::TimeDelta dur)
-    : bitstream_buffer_id(bbid), timestamp(ts), duration(dur) {
-}
-
-GpuVideoDecoder::BufferTimeData::~BufferTimeData() {}
-
 GpuVideoDecoder::GpuVideoDecoder(
     MessageLoop* message_loop,
     MessageLoop* vda_loop,
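The patch deletes BufferTimeData's out-of-line constructor and destructor along with its duration field. The header change is not part of this excerpt, but the it->first / it->second accesses later in the patch suggest the struct collapses to a plain pair. A sketch of the assumed replacement:

    // Assumed header-side replacement (not shown in this diff excerpt): with
    // the duration field gone, the struct reduces to an (id, timestamp) pair.
    typedef std::pair<int32, base::TimeDelta> BufferTimeData;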
@@ -163,7 +156,6 @@ void GpuVideoDecoder::Initialize(const scoped_refptr<DemuxerStream>& stream,
     demuxer_stream_->EnableBitstreamConverter();
 
   natural_size_ = config.natural_size();
-  config_frame_duration_ = GetFrameDuration(config);
 
   DVLOG(1) << "GpuVideoDecoder::Initialize() succeeded.";
   vda_loop_proxy_->PostTaskAndReply(
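Initialize() stops caching a config-derived fallback duration, since nothing downstream in this patch consumes a duration anymore. For orientation only, here is a hypothetical sketch of what such a config-derived default amounts to; this is not the actual GetFrameDuration() implementation, which is not shown in this excerpt. It simply inverts the configured frame rate:

    // Hypothetical sketch of a frame-rate-derived default duration; not the
    // real media helper. Returns a zero TimeDelta when the rate is unknown,
    // matching the "no duration" default being removed above.
    base::TimeDelta DefaultFrameDuration(int fps_numerator, int fps_denominator) {
      if (fps_numerator <= 0 || fps_denominator <= 0)
        return base::TimeDelta();
      return base::TimeDelta::FromMicroseconds(
          base::Time::kMicrosecondsPerSecond * fps_denominator / fps_numerator);
    }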
@@ -274,11 +266,8 @@ void GpuVideoDecoder::RequestBufferDecode(
 
 void GpuVideoDecoder::RecordBufferTimeData(
     const BitstreamBuffer& bitstream_buffer, const Buffer& buffer) {
-  base::TimeDelta duration = buffer.GetDuration();
-  if (duration == base::TimeDelta())
-    duration = config_frame_duration_;
   input_buffer_time_data_.push_front(BufferTimeData(
-      bitstream_buffer.id(), buffer.GetTimestamp(), duration));
+      bitstream_buffer.id(), buffer.GetTimestamp()));
   // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but
   // that's too small for some pathological B-frame test videos. The cost of
   // using too-high a value is low (192 bits per extra slot).
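RecordBufferTimeData() now records only the (id, timestamp) pair; the duration-defaulting logic is gone. The next hunk shows the list being trimmed from the back, so the whole structure is a small bounded FIFO keyed by bitstream buffer id. A self-contained sketch of that pattern in plain C++, with hypothetical names and an assumed cap:

    #include <cstddef>
    #include <list>
    #include <utility>

    // Bounded bookkeeping sketch: newest (id, timestamp) entries go in front;
    // the back is trimmed once a fixed cap is exceeded. Names and the cap
    // value are illustrative, not taken from this excerpt.
    typedef std::pair<int, long long> IdTimestamp;  // (bitstream id, timestamp in us)
    typedef std::list<IdTimestamp> TimeDataList;

    const std::size_t kMaxEntries = 32;  // Assumed cap.

    void RecordTimeData(TimeDataList* entries, int id, long long timestamp_us) {
      entries->push_front(std::make_pair(id, timestamp_us));
      if (entries->size() > kMaxEntries)
        entries->pop_back();
    }

Incidentally, the "192 bits per extra slot" figure in the comment above dates from when each slot also carried a duration (an int32 plus two 64-bit TimeDeltas, padded to 24 bytes); after this change each slot is one TimeDelta smaller.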
@@ -289,21 +278,16 @@ void GpuVideoDecoder::RecordBufferTimeData(
     input_buffer_time_data_.pop_back();
 }
 
-void GpuVideoDecoder::GetBufferTimeData(
-    int32 id, base::TimeDelta* timestamp, base::TimeDelta* duration) {
-  // If all else fails later, at least we can set a default duration if there
-  // was one in the config.
-  *duration = config_frame_duration_;
+base::TimeDelta GpuVideoDecoder::GetBufferTimeData(int32 id) {
   for (std::list<BufferTimeData>::const_iterator it =
           input_buffer_time_data_.begin(); it != input_buffer_time_data_.end();
       ++it) {
-    if (it->bitstream_buffer_id != id)
+    if (it->first != id)
       continue;
-    *timestamp = it->timestamp;
-    *duration = it->duration;
-    return;
+    return it->second;

Ami GONE FROM CHROMIUM  2012/07/31 18:44:42
now that you're returning the value it'd be clearer ...

acolwell GONE FROM CHROMIUM  2012/07/31 19:23:44
Done.

   }
   NOTREACHED() << "Missing bitstreambuffer id: " << id;
+  return kNoTimestamp();
 }
 
 const gfx::Size& GpuVideoDecoder::natural_size() {
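With the duration out-param gone, GetBufferTimeData() reduces to a lookup that returns the recorded timestamp, or kNoTimestamp() if the id was never recorded (the NOTREACHED() makes a missing id a debug-build failure first). Continuing the sketch above, the same lookup shape with a sentinel standing in for media::kNoTimestamp():

    // Lookup continuing the sketch above. The sentinel plays the role of
    // media::kNoTimestamp(); the caller treats it as "timestamp unknown".
    const long long kNoTimestampUs = -1;

    long long LookupTimestamp(const TimeDataList& entries, int id) {
      for (TimeDataList::const_iterator it = entries.begin();
           it != entries.end(); ++it) {
        if (it->first == id)
          return it->second;
      }
      return kNoTimestampUs;
    }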
@@ -393,14 +377,11 @@ void GpuVideoDecoder::PictureReady(const media::Picture& picture) {
   const PictureBuffer& pb = it->second;
 
   // Update frame's timestamp.
-  base::TimeDelta timestamp;
-  base::TimeDelta duration;
-  GetBufferTimeData(picture.bitstream_buffer_id(), &timestamp, &duration);
-
+  base::TimeDelta timestamp = GetBufferTimeData(picture.bitstream_buffer_id());
   DCHECK(decoder_texture_target_);
   scoped_refptr<VideoFrame> frame(VideoFrame::WrapNativeTexture(
       pb.texture_id(), decoder_texture_target_, pb.size().width(),
-      pb.size().height(), timestamp, duration,
+      pb.size().height(), timestamp,
       base::Bind(&GpuVideoDecoder::ReusePictureBuffer, this,
                  picture.picture_buffer_id())));