Index: content/renderer/media/rtc_video_decoder.cc
diff --git a/content/renderer/media/rtc_video_decoder.cc b/content/renderer/media/rtc_video_decoder.cc
index 4e4b3199683e7a0f33e37f48fee7360be304417c..930e788ffe18368caad933681644fceefaab72a3 100644
--- a/content/renderer/media/rtc_video_decoder.cc
+++ b/content/renderer/media/rtc_video_decoder.cc
@@ -61,13 +61,9 @@ RTCVideoDecoder::SHMBuffer::~SHMBuffer() { shm->Close(); }
 RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id,
                                         uint32_t timestamp,
-                                        int width,
-                                        int height,
                                         size_t size)
     : bitstream_buffer_id(bitstream_buffer_id),
       timestamp(timestamp),
-      width(width),
-      height(height),
       size(size) {}
 RTCVideoDecoder::BufferData::BufferData() {}
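For reference, the struct behind this constructor is declared in rtc_video_decoder.h (that hunk is not part of this excerpt). With width/height dropped, the per-buffer metadata would reduce to roughly the following sketch, inferred from this .cc diff rather than copied from the actual header:

  struct BufferData {
    BufferData(int32 bitstream_buffer_id, uint32_t timestamp, size_t size);
    BufferData();
    ~BufferData();
    int32 bitstream_buffer_id;
    uint32_t timestamp;  // 90KHz timestamp received from WebRTC
    size_t size;         // encoded payload size in bytes
  };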
@@ -200,6 +196,7 @@ int32_t RTCVideoDecoder::Decode(
   // internally. Platforms whose VDAs fail to support mid-stream resolution
   // change gracefully need to have their clients cover for them, and we do that
   // here.
+  // Note this may not work because encoded size is not always available.
[Review thread on the comment added above]
Pawel Osciak (2014/08/12 08:50:40): Please actually explain why and what the consequen...
kcwu (2014/08/13 14:27:19): I don't know what to say here. Could you advise?
Pawel Osciak (2014/08/14 07:15:36): Please say when encoded size is not available and...
kcwu (2014/08/14 12:31:01): per offline chat, the original code already silent...
 #ifdef ANDROID
   const bool kVDACanHandleMidstreamResize = false;
 #else
@@ -208,13 +205,15 @@ int32_t RTCVideoDecoder::Decode(
   bool need_to_reset_for_midstream_resize = false;
   if (inputImage._frameType == webrtc::kKeyFrame) {
-    DVLOG(2) << "Got key frame. size=" << inputImage._encodedWidth << "x"
-             << inputImage._encodedHeight;
-    gfx::Size prev_frame_size = frame_size_;
-    frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight);
-    if (!kVDACanHandleMidstreamResize && !prev_frame_size.IsEmpty() &&
-        prev_frame_size != frame_size_) {
-      need_to_reset_for_midstream_resize = true;
+    if (inputImage._encodedWidth && inputImage._encodedHeight) {
+      DVLOG(2) << "Got key frame. size=" << inputImage._encodedWidth << "x"
+               << inputImage._encodedHeight;
+      gfx::Size prev_frame_size = frame_size_;
+      frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight);
+      if (!kVDACanHandleMidstreamResize && !prev_frame_size.IsEmpty() &&
+          prev_frame_size != frame_size_) {
+        need_to_reset_for_midstream_resize = true;
+      }
     }
   } else if (IsFirstBufferAfterReset(next_bitstream_buffer_id_,
                                      reset_bitstream_buffer_id_)) {
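To spell out the consequence debated in the thread above: if an encoder or packetizer leaves _encodedWidth/_encodedHeight at 0 on a key frame, the new guard skips the size bookkeeping entirely, so frame_size_ keeps its last known value and need_to_reset_for_midstream_resize stays false even if the resolution did change; output dimensions are instead taken from media::Picture::size() in PictureReady(). A hypothetical test-style snippet (not part of this CL) illustrating that path:

  // Key frame whose encoded size was never filled in by the sender.
  webrtc::EncodedImage key_frame;
  key_frame._frameType = webrtc::kKeyFrame;
  key_frame._encodedWidth = 0;
  key_frame._encodedHeight = 0;
  // With this patch, Decode() takes the kKeyFrame branch but skips the inner
  // block: frame_size_ is not updated and no VDA reset is requested, even on
  // platforms where kVDACanHandleMidstreamResize is false.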
@@ -227,8 +226,6 @@ int32_t RTCVideoDecoder::Decode(
   // Create buffer metadata.
   BufferData buffer_data(next_bitstream_buffer_id_,
                          inputImage._timeStamp,
-                         frame_size_.width(),
-                         frame_size_.height(),
                          inputImage._length);
   // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
   next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST;
@@ -365,12 +362,11 @@ void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
   const media::PictureBuffer& pb = it->second;
   // Create a media::VideoFrame.
-  uint32_t timestamp = 0, width = 0, height = 0;
+  uint32_t timestamp = 0;
   size_t size = 0;
-  GetBufferData(
-      picture.bitstream_buffer_id(), &timestamp, &width, &height, &size);
+  GetBufferData(picture.bitstream_buffer_id(), &timestamp);
   scoped_refptr<media::VideoFrame> frame =
-      CreateVideoFrame(picture, pb, timestamp, width, height, size);
+      CreateVideoFrame(picture, pb, timestamp, size);
[Review thread on the CreateVideoFrame() call above]
Pawel Osciak (2014/08/12 08:50:40): size is always zero... Looks like this variable wa...
kcwu (2014/08/13 14:27:19): Done.
   bool inserted =
       picture_buffers_at_display_.insert(std::make_pair(
           picture.picture_buffer_id(),
@@ -380,7 +376,8 @@ void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
   // Create a WebRTC video frame.
   webrtc::RefCountImpl<NativeHandleImpl>* handle =
       new webrtc::RefCountImpl<NativeHandleImpl>(frame);
-  webrtc::TextureVideoFrame decoded_image(handle, width, height, timestamp, 0);
+  webrtc::TextureVideoFrame decoded_image(
+      handle, picture.size().width(), picture.size().height(), timestamp, 0);
   // Invoke decode callback. WebRTC expects no callback after Reset or Release.
   {
@@ -424,10 +421,8 @@ scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame(
     const media::Picture& picture,
     const media::PictureBuffer& pb,
     uint32_t timestamp,
-    uint32_t width,
-    uint32_t height,
     size_t size) {
-  gfx::Rect visible_rect(width, height);
+  gfx::Rect visible_rect(picture.size());
   DCHECK(decoder_texture_target_);
   // Convert timestamp from 90KHz to ms.
   base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue(
@@ -777,18 +772,13 @@ void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) {
 }
 void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id,
-                                    uint32_t* timestamp,
-                                    uint32_t* width,
-                                    uint32_t* height,
-                                    size_t* size) {
+                                    uint32_t* timestamp) {
   for (std::list<BufferData>::iterator it = input_buffer_data_.begin();
        it != input_buffer_data_.end();
        ++it) {
     if (it->bitstream_buffer_id != bitstream_buffer_id)
       continue;
     *timestamp = it->timestamp;
-    *width = it->width;
-    *height = it->height;
     return;
   }
   NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id;
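Presumably the matching declaration in rtc_video_decoder.h shrinks the same way (that hunk is not shown here). A sketch of the signature change, with the old form kept as a comment for comparison:

  // Before:
  //   void GetBufferData(int32 bitstream_buffer_id, uint32_t* timestamp,
  //                      uint32_t* width, uint32_t* height, size_t* size);
  // After: width/height now come from media::Picture::size() in PictureReady().
  void GetBufferData(int32 bitstream_buffer_id, uint32_t* timestamp);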