Index: media/cast/video_receiver/video_receiver.cc
diff --git a/media/cast/video_receiver/video_receiver.cc b/media/cast/video_receiver/video_receiver.cc
index 4af2c5316247cbbd4b58edd970b1c9d010958ea8..df17a537dc2fc1e7a22946264463adb03e340afc 100644
--- a/media/cast/video_receiver/video_receiver.cc
+++ b/media/cast/video_receiver/video_receiver.cc
@@ -94,15 +94,16 @@ void VideoReceiver::GetRawVideoFrame(
 
 void VideoReceiver::DecodeEncodedVideoFrame(
     const VideoFrameDecodedCallback& callback,
-    scoped_ptr<transport::EncodedVideoFrame> encoded_frame,
-    const base::TimeTicks& playout_time) {
+    scoped_ptr<transport::EncodedFrame> encoded_frame) {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
   if (!encoded_frame) {
-    callback.Run(make_scoped_refptr<VideoFrame>(NULL), playout_time, false);
+    callback.Run(
+        make_scoped_refptr<VideoFrame>(NULL), base::TimeTicks(), false);
     return;
   }
   const uint32 frame_id = encoded_frame->frame_id;
   const uint32 rtp_timestamp = encoded_frame->rtp_timestamp;
+  const base::TimeTicks playout_time = encoded_frame->reference_time;
   video_decoder_->DecodeFrame(encoded_frame.Pass(),
                               base::Bind(&VideoReceiver::EmitRawVideoFrame,
                                          cast_environment_,
@@ -138,8 +139,7 @@ void VideoReceiver::EmitRawVideoFrame(
   callback.Run(video_frame, playout_time, is_continuous);
 }
 
-void VideoReceiver::GetEncodedVideoFrame(
-    const VideoFrameEncodedCallback& callback) {
+void VideoReceiver::GetEncodedVideoFrame(const FrameEncodedCallback& callback) {
   DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
   frame_request_queue_.push_back(callback);
   EmitAvailableEncodedFrames();
@@ -152,8 +152,8 @@ void VideoReceiver::EmitAvailableEncodedFrames() {
     // Attempt to peek at the next completed frame from the |framer_|.
     // TODO(miu): We should only be peeking at the metadata, and not copying the
     // payload yet! Or, at least, peek using a StringPiece instead of a copy.
-    scoped_ptr<transport::EncodedVideoFrame> encoded_frame(
-        new transport::EncodedVideoFrame());
+    scoped_ptr<transport::EncodedFrame> encoded_frame(
+        new transport::EncodedFrame());
     bool is_consecutively_next_frame = false;
     if (!framer_.GetEncodedVideoFrame(encoded_frame.get(),
                                       &is_consecutively_next_frame)) {
@@ -201,8 +201,8 @@ void VideoReceiver::EmitAvailableEncodedFrames() {
       encoded_frame->data.swap(decrypted_video_data);
     }
 
-    // At this point, we have a decrypted EncodedVideoFrame ready to be emitted.
-    encoded_frame->codec = codec_;
+    // At this point, we have a decrypted EncodedFrame ready to be emitted.
+    encoded_frame->reference_time = playout_time;
     framer_.ReleaseFrame(encoded_frame->frame_id);
     // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
     TRACE_EVENT_INSTANT2(
@@ -214,8 +214,7 @@ void VideoReceiver::EmitAvailableEncodedFrames() {
     cast_environment_->PostTask(CastEnvironment::MAIN,
                                 FROM_HERE,
                                 base::Bind(frame_request_queue_.front(),
-                                           base::Passed(&encoded_frame),
-                                           playout_time));
+                                           base::Passed(&encoded_frame)));
     frame_request_queue_.pop_front();
   }
 }
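
The gist of the patch: the playout time no longer rides alongside the frame as a
separate callback argument. It is stored on the frame itself as reference_time,
the producer stamps it before handing the frame off, and consumers such as
DecodeEncodedVideoFrame recover it with encoded_frame->reference_time. The
standalone sketch below models that pattern only; Frame, FrameCallback, and
DeliverFrame are hypothetical stand-ins (with std::chrono and std::unique_ptr in
place of base::TimeTicks and scoped_ptr), not the Chromium cast API.

// Minimal, self-contained sketch of the pattern the patch adopts. All names
// below (Frame, FrameCallback, DeliverFrame) are illustrative stand-ins, not
// the Chromium cast types.
#include <chrono>
#include <cstdint>
#include <functional>
#include <iostream>
#include <memory>
#include <string>
#include <utility>

using TimeTicks = std::chrono::steady_clock::time_point;

struct Frame {
  uint32_t frame_id = 0;
  uint32_t rtp_timestamp = 0;
  TimeTicks reference_time;  // Playout time now travels with the frame.
  std::string data;
};

// Old shape: void(std::unique_ptr<Frame>, TimeTicks playout_time).
// New shape: the callback receives only the frame.
using FrameCallback = std::function<void(std::unique_ptr<Frame>)>;

// Producer side: stamp the frame, then hand it off without a second argument,
// mirroring how EmitAvailableEncodedFrames() sets reference_time before the
// PostTask in the last hunk.
void DeliverFrame(const FrameCallback& callback,
                  std::unique_ptr<Frame> frame,
                  TimeTicks playout_time) {
  frame->reference_time = playout_time;
  callback(std::move(frame));
}

int main() {
  // Consumer side: recover the playout time from the frame itself, mirroring
  // "const base::TimeTicks playout_time = encoded_frame->reference_time;".
  FrameCallback on_frame = [](std::unique_ptr<Frame> frame) {
    const TimeTicks playout_time = frame->reference_time;
    const auto delay = std::chrono::duration_cast<std::chrono::milliseconds>(
        playout_time - std::chrono::steady_clock::now());
    std::cout << "frame " << frame->frame_id << " due in " << delay.count()
              << " ms, " << frame->data.size() << " bytes\n";
  };

  auto frame = std::make_unique<Frame>();
  frame->frame_id = 1;
  frame->data = "encoded-payload";
  DeliverFrame(on_frame, std::move(frame),
               std::chrono::steady_clock::now() + std::chrono::milliseconds(40));
  return 0;
}

Keeping the timestamp inside the frame makes the frame self-describing, which is
what lets the diff drop the extra playout_time parameter from both the
GetEncodedVideoFrame callback and the base::Bind in the final hunk.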