Chromium Code Reviews | Index: media/filters/gpu_video_decoder.cc |
| diff --git a/media/filters/gpu_video_decoder.cc b/media/filters/gpu_video_decoder.cc |
| index 848ae666c303fca1ec585b6b7d14c08188c453ed..d36686a1e4865b339b84106380e5947796f289e6 100644 |
| --- a/media/filters/gpu_video_decoder.cc |
| +++ b/media/filters/gpu_video_decoder.cc |
| @@ -36,6 +36,13 @@ GpuVideoDecoder::BufferPair::BufferPair( |
| GpuVideoDecoder::BufferPair::~BufferPair() {} |
| +GpuVideoDecoder::BufferData::BufferData( |
| + int32 bbid, base::TimeDelta ts, const gfx::Size& natural_size) |
| + : bitstream_buffer_id(bbid), timestamp(ts), natural_size(natural_size) { |
|
Ami GONE FROM CHROMIUM
2012/08/02 00:16:14
natural_size is a self-assignment here.
acolwell GONE FROM CHROMIUM
2012/08/02 02:16:30
oops.. Done
|
| +} |
| + |
| +GpuVideoDecoder::BufferData::~BufferData() {} |
| + |
| GpuVideoDecoder::GpuVideoDecoder( |
| MessageLoop* message_loop, |
| MessageLoop* vda_loop, |
| @@ -258,39 +265,38 @@ void GpuVideoDecoder::RequestBufferDecode( |
| bool inserted = bitstream_buffers_in_decoder_.insert(std::make_pair( |
| bitstream_buffer.id(), BufferPair(shm_buffer, buffer))).second; |
| DCHECK(inserted); |
| - RecordBufferTimeData(bitstream_buffer, *buffer); |
| + RecordBufferData(bitstream_buffer, *buffer); |
| vda_loop_proxy_->PostTask(FROM_HERE, base::Bind( |
| &VideoDecodeAccelerator::Decode, weak_vda_, bitstream_buffer)); |
| } |
| -void GpuVideoDecoder::RecordBufferTimeData( |
| +void GpuVideoDecoder::RecordBufferData( |
| const BitstreamBuffer& bitstream_buffer, const Buffer& buffer) { |
| - input_buffer_time_data_.push_front(BufferTimeData( |
| - bitstream_buffer.id(), buffer.GetTimestamp())); |
| + input_buffer_time_data_.push_front(BufferData( |
| + bitstream_buffer.id(), buffer.GetTimestamp(), natural_size_)); |
| // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but |
| // that's too small for some pathological B-frame test videos. The cost of |
| // using too-high a value is low (192 bits per extra slot). |
| - static const size_t kMaxInputBufferTimeDataSize = 128; |
| + static const size_t kMaxInputBufferDataSize = 128; |
| // Pop from the back of the list, because that's the oldest and least likely |
| // to be useful in the future data. |
| - if (input_buffer_time_data_.size() > kMaxInputBufferTimeDataSize) |
| + if (input_buffer_time_data_.size() > kMaxInputBufferDataSize) |
| input_buffer_time_data_.pop_back(); |
| } |
| -base::TimeDelta GpuVideoDecoder::GetBufferTimestamp(int32 id) { |
| - for (std::list<BufferTimeData>::const_iterator it = |
| +void GpuVideoDecoder::GetBufferData(int32 id, base::TimeDelta* timestamp, |
| + gfx::Size* natural_size) { |
| + for (std::list<BufferData>::const_iterator it = |
| input_buffer_time_data_.begin(); it != input_buffer_time_data_.end(); |
| ++it) { |
| - if (it->first == id) |
| - return it->second; |
| + if (it->bitstream_buffer_id != id) |
| + continue; |
| + *timestamp = it->timestamp; |
| + *natural_size = it->natural_size; |
| + return; |
| } |
| NOTREACHED() << "Missing bitstreambuffer id: " << id; |
| - return kNoTimestamp(); |
| -} |
| - |
| -const gfx::Size& GpuVideoDecoder::natural_size() { |
| - return natural_size_; |
| } |
| bool GpuVideoDecoder::HasAlpha() const { |
| @@ -376,11 +382,13 @@ void GpuVideoDecoder::PictureReady(const media::Picture& picture) { |
| const PictureBuffer& pb = it->second; |
| // Update frame's timestamp. |
| - base::TimeDelta timestamp = GetBufferTimestamp(picture.bitstream_buffer_id()); |
| + base::TimeDelta timestamp; |
| + gfx::Size natural_size; |
| + GetBufferData(picture.bitstream_buffer_id(), ×tamp, &natural_size); |
| DCHECK(decoder_texture_target_); |
| scoped_refptr<VideoFrame> frame(VideoFrame::WrapNativeTexture( |
| pb.texture_id(), decoder_texture_target_, pb.size().width(), |
| - pb.size().height(), timestamp, |
| + pb.size().height(), natural_size, timestamp, |
| base::Bind(&GpuVideoDecoder::ReusePictureBuffer, this, |
| picture.picture_buffer_id()))); |